import datetime
import time
import numpy as np
import pandas as pd
from packaging import version
from collections import Counter
from sklearn.metrics import confusion_matrix, classification_report
from sklearn.metrics import accuracy_score
from sklearn.metrics import mean_squared_error as MSE
from sklearn.model_selection import train_test_split
from sklearn.manifold import TSNE
import matplotlib.pyplot as plt
import matplotlib as mpl
import seaborn as sns
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import models, layers
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPool2D, BatchNormalization, Dropout, Flatten, Dense
from tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping, TensorBoard
from tensorflow.keras.preprocessing import image
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.layers import Conv2D,MaxPool2D,Dense,Flatten,Dropout,Input, AveragePooling2D, Activation,Conv2D, MaxPooling2D, BatchNormalization,Concatenate
from tensorflow.keras.callbacks import EarlyStopping, TensorBoard
from tensorflow.keras import regularizers, optimizers
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# NOTE: '%matplotlib inline' is IPython/Jupyter magic, not plain Python —
# this file is a notebook export and only runs as-is inside a notebook.
%matplotlib inline
# Print numpy arrays with 3 decimals and without scientific notation.
np.set_printoptions(precision=3, suppress=True)
def get_three_classes(x, y):
    """Keep only the samples of classes 0, 1 and 2, shuffled and one-hot encoded.

    Parameters
    ----------
    x : np.ndarray
        Samples indexed along axis 0 (e.g. images).
    y : np.ndarray
        Integer labels of shape (n, 1), as returned by the keras cifar10 loader.

    Returns
    -------
    tuple of np.ndarray
        (x_subset, y_subset) restricted to classes 0-2, in random order,
        with y_subset one-hot encoded.
    """
    def indices_of(class_id):
        # np.where on the (n, 1) label array yields (row_idx, col_idx);
        # only the row indices are needed.
        indices, _ = np.where(y == float(class_id))
        return indices

    indices = np.concatenate([indices_of(0), indices_of(1), indices_of(2)], axis=0)
    x = x[indices]
    y = y[indices]
    # Shuffle: np.random.permutation is the idiomatic way to draw a random
    # ordering (replaces np.random.choice(range(n), n, replace=False)).
    order = np.random.permutation(x.shape[0])
    x = x[order]
    y = y[order]
    y = tf.keras.utils.to_categorical(y)
    return x, y
def show_random_examples(x, y, p, class_names=None):
    """Show 10 random test images labelled with their predicted class.

    Correct predictions are labelled green, incorrect ones red.

    Parameters
    ----------
    x : array of images.
    y : one-hot encoded true labels.
    p : predicted class probabilities (same shape as y).
    class_names : sequence of str, optional
        Class-id -> display-name mapping. Defaults to the module-level
        ``class_names_preview`` for backward compatibility.
        NOTE(review): ``class_names_preview`` is not defined anywhere in this
        file — presumably created in another notebook cell; confirm, or pass
        ``class_names`` explicitly.
    """
    if class_names is None:
        class_names = class_names_preview
    indices = np.random.choice(range(x.shape[0]), 10, replace=False)
    x = x[indices]
    y = y[indices]
    p = p[indices]
    plt.figure(figsize=(10, 5))
    for i in range(10):
        plt.subplot(2, 5, i + 1)
        plt.imshow(x[i])
        plt.xticks([])
        plt.yticks([])
        col = 'green' if np.argmax(y[i]) == np.argmax(p[i]) else 'red'
        plt.xlabel(class_names[np.argmax(p[i])], color=col)
    plt.show()
def plot_history(history):
    """Plot training vs. validation loss and accuracy, one subplot each."""
    hist = history.history
    # Read all four curves up front, then pair them per metric.
    curves = [
        (hist['loss'], hist['val_loss'], 'Loss'),
        (hist['accuracy'], hist['val_accuracy'], 'Accuracy'),
    ]
    epochs = len(hist['loss'])
    plt.figure(figsize=(16, 4))
    for idx, (train_curve, val_curve, label) in enumerate(curves):
        plt.subplot(1, 2, idx + 1)
        plt.plot(range(epochs), train_curve, label=f'Training {label}')
        plt.plot(range(epochs), val_curve, label=f'Validation {label}')
        plt.legend()
    plt.show()
def print_validation_report(y_test, predictions):
    """Print a classification report, accuracy score and RMSE."""
    print("Classification Report")
    print(classification_report(y_test, predictions))
    acc = accuracy_score(y_test, predictions)
    print(f'Accuracy Score: {acc}')
    rmse = np.sqrt(MSE(y_test, predictions))
    print(f'Root Mean Square Error: {rmse}')
def plot_confusion_matrix(y_true, y_pred):
    """Render the confusion matrix of y_true vs. y_pred as a heatmap."""
    matrix = confusion_matrix(y_true, y_pred)
    _, ax = plt.subplots(figsize=(8, 8))
    sns.heatmap(
        matrix,
        annot=True,
        fmt='d',
        linewidths=.75,
        cbar=False,
        ax=ax,
        cmap='Blues',
        linecolor='white',
    )
    plt.ylabel('true label')
    plt.xlabel('predicted label')
# Download (first run only) and load the CIFAR-10 train/test split.
(x_train, y_train), (x_test, y_test) = keras.datasets.cifar10.load_data()
Downloading data from https://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz 170498071/170498071 [==============================] - 14s 0us/step
# Sanity-check the shapes of the loaded arrays.
print(f'train_images:\t{x_train.shape}')
print(f'train_labels:\t{y_train.shape}')
print(f'test_images:\t\t{x_test.shape}')
print(f'test_labels:\t\t{y_test.shape}')
train_images: (50000, 32, 32, 3) train_labels: (50000, 1) test_images: (10000, 32, 32, 3) test_labels: (10000, 1)
# FIX: removed the duplicated `from tensorflow.keras.utils import
# to_categorical` — the same import already appears at the top of the file.
# Normalizing: scale uint8 pixel values into [0, 1] floats.
x_train = x_train / 255.0
x_test = x_test / 255.0
# One hot encoding of the integer class ids (10 CIFAR classes).
y_train_cat = to_categorical(y_train, 10)
y_test_cat = to_categorical(y_test, 10)
# Model 1: a plain fully-connected baseline (flatten -> 256 -> 10).
model1 = models.Sequential([
    keras.layers.Flatten(input_shape=[32, 32, 3]),
    layers.Dense(units=256, activation=tf.nn.relu),
    layers.Dense(units=10, activation=tf.nn.softmax),
])
model1.summary()
Model: "sequential_8"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
flatten_8 (Flatten) (None, 3072) 0
dense_16 (Dense) (None, 256) 786688
dense_17 (Dense) (None, 10) 2570
=================================================================
Total params: 789,258
Trainable params: 789,258
Non-trainable params: 0
_________________________________________________________________
# Save an architecture diagram, then compile and train model 1.
keras.utils.plot_model(model1, "CIFAR10.png", show_shapes=True)
model1.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# The wall-clock timer below wraps fit(), so it measures TRAINING time.
start_time = datetime.datetime.now()
history1=model1.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.8795 - accuracy: 0.3298 - val_loss: 1.7787 - val_accuracy: 0.3585 Epoch 2/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7245 - accuracy: 0.3847 - val_loss: 1.6940 - val_accuracy: 0.3884 Epoch 3/50 1563/1563 [==============================] - 5s 4ms/step - loss: 1.6646 - accuracy: 0.4081 - val_loss: 1.6485 - val_accuracy: 0.4171 Epoch 4/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6275 - accuracy: 0.4184 - val_loss: 1.6380 - val_accuracy: 0.4145 Epoch 5/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6024 - accuracy: 0.4291 - val_loss: 1.5932 - val_accuracy: 0.4409 Epoch 6/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5831 - accuracy: 0.4369 - val_loss: 1.6287 - val_accuracy: 0.4249 Epoch 7/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5640 - accuracy: 0.4410 - val_loss: 1.5850 - val_accuracy: 0.4291 Epoch 8/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5518 - accuracy: 0.4495 - val_loss: 1.5567 - val_accuracy: 0.4475 Epoch 9/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.5368 - accuracy: 0.4512 - val_loss: 1.5757 - val_accuracy: 0.4381 Epoch 10/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.5266 - accuracy: 0.4564 - val_loss: 1.5726 - val_accuracy: 0.4433 Epoch 11/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5161 - accuracy: 0.4595 - val_loss: 1.5651 - val_accuracy: 0.4460 Epoch 12/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5112 - accuracy: 0.4577 - val_loss: 1.5625 - val_accuracy: 0.4452 Epoch 13/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5012 - accuracy: 0.4649 - val_loss: 1.5370 - val_accuracy: 0.4532 Epoch 14/50 1563/1563 [==============================] - 8s 5ms/step - loss: 1.4947 - accuracy: 0.4683 - val_loss: 1.5424 - 
val_accuracy: 0.4555 Epoch 15/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.4884 - accuracy: 0.4696 - val_loss: 1.5334 - val_accuracy: 0.4584 Epoch 16/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4826 - accuracy: 0.4710 - val_loss: 1.5678 - val_accuracy: 0.4386 Epoch 17/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.4741 - accuracy: 0.4749 - val_loss: 1.5227 - val_accuracy: 0.4583 Epoch 18/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4699 - accuracy: 0.4786 - val_loss: 1.5400 - val_accuracy: 0.4534 Epoch 19/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4621 - accuracy: 0.4767 - val_loss: 1.5441 - val_accuracy: 0.4541 Epoch 20/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4597 - accuracy: 0.4802 - val_loss: 1.5676 - val_accuracy: 0.4428 Epoch 21/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4536 - accuracy: 0.4822 - val_loss: 1.5117 - val_accuracy: 0.4631 Epoch 22/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4508 - accuracy: 0.4836 - val_loss: 1.5416 - val_accuracy: 0.4545 Epoch 23/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4498 - accuracy: 0.4813 - val_loss: 1.5540 - val_accuracy: 0.4415 Epoch 24/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.4422 - accuracy: 0.4860 - val_loss: 1.5885 - val_accuracy: 0.4419 Epoch 25/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4444 - accuracy: 0.4868 - val_loss: 1.5884 - val_accuracy: 0.4353 Epoch 26/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4344 - accuracy: 0.4894 - val_loss: 1.6122 - val_accuracy: 0.4304 Epoch 27/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4333 - accuracy: 0.4878 - val_loss: 1.5263 - val_accuracy: 0.4589 Epoch 28/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4308 - 
accuracy: 0.4898 - val_loss: 1.5621 - val_accuracy: 0.4468 Epoch 29/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4252 - accuracy: 0.4917 - val_loss: 1.5107 - val_accuracy: 0.4651 Epoch 30/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4229 - accuracy: 0.4943 - val_loss: 1.5529 - val_accuracy: 0.4504 Epoch 31/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4173 - accuracy: 0.4946 - val_loss: 1.5437 - val_accuracy: 0.4535 Epoch 32/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4128 - accuracy: 0.4967 - val_loss: 1.5216 - val_accuracy: 0.4597 Epoch 33/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4138 - accuracy: 0.4951 - val_loss: 1.5654 - val_accuracy: 0.4445 Epoch 34/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4101 - accuracy: 0.4957 - val_loss: 1.5592 - val_accuracy: 0.4468 Epoch 35/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4092 - accuracy: 0.4967 - val_loss: 1.5422 - val_accuracy: 0.4594 Epoch 36/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.4058 - accuracy: 0.4973 - val_loss: 1.5498 - val_accuracy: 0.4502 Epoch 37/50 1563/1563 [==============================] - 5s 4ms/step - loss: 1.4017 - accuracy: 0.4982 - val_loss: 1.5173 - val_accuracy: 0.4651 Epoch 38/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4026 - accuracy: 0.4998 - val_loss: 1.5727 - val_accuracy: 0.4435 Epoch 39/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3970 - accuracy: 0.5003 - val_loss: 1.5130 - val_accuracy: 0.4692 Epoch 40/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3932 - accuracy: 0.5029 - val_loss: 1.5402 - val_accuracy: 0.4609 Epoch 41/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.3947 - accuracy: 0.5021 - val_loss: 1.5136 - val_accuracy: 0.4722 Epoch 42/50 1563/1563 [==============================] 
- 6s 4ms/step - loss: 1.3921 - accuracy: 0.5056 - val_loss: 1.5521 - val_accuracy: 0.4597 Epoch 43/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.3893 - accuracy: 0.5039 - val_loss: 1.5313 - val_accuracy: 0.4647 Epoch 44/50 1563/1563 [==============================] - 5s 3ms/step - loss: 1.3889 - accuracy: 0.5038 - val_loss: 1.5491 - val_accuracy: 0.4589 Epoch 45/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3885 - accuracy: 0.5038 - val_loss: 1.5235 - val_accuracy: 0.4689 Epoch 46/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3823 - accuracy: 0.5069 - val_loss: 1.5416 - val_accuracy: 0.4574 Epoch 47/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3781 - accuracy: 0.5083 - val_loss: 1.5290 - val_accuracy: 0.4623 Epoch 48/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3820 - accuracy: 0.5073 - val_loss: 1.5366 - val_accuracy: 0.4594 Epoch 49/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3788 - accuracy: 0.5071 - val_loss: 1.5528 - val_accuracy: 0.4505 Epoch 50/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3766 - accuracy: 0.5092 - val_loss: 1.5361 - val_accuracy: 0.4677
# BUG FIX: the timer wraps model1.fit() (training), not prediction — the old
# message said "to predict". The variable name is kept for backward
# compatibility with any later cells that reference it.
time_to_predict1 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict1))
total elapsed time to predict: 0:05:23.411278
# Evaluate model 1 on the held-out test set (categorical loss, accuracy).
loss, accuracy = model1.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 3ms/step - loss: 1.5361 - accuracy: 0.4677 test set accuracy: 46.77000045776367
# Per-class probability predictions for the full test set -> (10000, 10).
preds1 = model1.predict(x_test)
print('shape of preds: ', preds1.shape)
313/313 [==============================] - 1s 2ms/step shape of preds: (10000, 10)
# Plotting Performance Metrics
# Inspect which metrics Keras recorded during training of model 1.
history_dict1 = history1.history
history_dict1.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Last five epochs of model 1's training history as a rounded table.
history_df1=pd.DataFrame(history_dict1)
history_df1.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 1.382 | 0.507 | 1.542 | 0.457 |
| 46 | 1.378 | 0.508 | 1.529 | 0.462 |
| 47 | 1.382 | 0.507 | 1.537 | 0.459 |
| 48 | 1.379 | 0.507 | 1.553 | 0.451 |
| 49 | 1.377 | 0.509 | 1.536 | 0.468 |
# Extract the individual metric curves for model 1 and plot all four
# (loss/accuracy, train/validation) on a single axis.
losses1 = history1.history['loss']
accs1 = history1.history['accuracy']
val_losses1 = history1.history['val_loss']
val_accs1 = history1.history['val_accuracy']
epochs1 = len(losses1)
pd.DataFrame(history1.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f66b4090d10>
# Confusion Matrix
# Collapse the (10000, 10) probability vectors to hard class ids.
pred1= model1.predict(x_test)
pred1=np.argmax(pred1, axis=1)
313/313 [==============================] - 1s 2ms/step
# Text metrics (report, accuracy, RMSE) for model 1.
# NOTE(review): RMSE here compares raw class *ids*, which is of questionable
# meaning for nominal CIFAR labels — confirm this metric is intentional.
print_validation_report(y_test, pred1)
Classification Report
precision recall f1-score support
0 0.64 0.43 0.52 1000
1 0.52 0.67 0.58 1000
2 0.40 0.29 0.34 1000
3 0.28 0.44 0.34 1000
4 0.41 0.40 0.40 1000
5 0.43 0.30 0.35 1000
6 0.53 0.40 0.46 1000
7 0.49 0.56 0.52 1000
8 0.60 0.63 0.61 1000
9 0.52 0.55 0.53 1000
accuracy 0.47 10000
macro avg 0.48 0.47 0.47 10000
weighted avg 0.48 0.47 0.47 10000
Accuracy Score: 0.4677
Root Mean Square Error: 3.1607277642973304
# Heatmap of model 1's confusion matrix.
plot_confusion_matrix(y_test,pred1)
# Model 2: a deeper fully-connected network (flatten -> 256 -> 512 -> 10).
model2 = models.Sequential([
    keras.layers.Flatten(input_shape=[32, 32, 3]),
    layers.Dense(units=256, activation=tf.nn.relu),
    layers.Dense(units=512, activation=tf.nn.relu),
    layers.Dense(units=10, activation=tf.nn.softmax),
])
model2.summary()
Model: "sequential_9"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
flatten_9 (Flatten) (None, 3072) 0
dense_18 (Dense) (None, 256) 786688
dense_19 (Dense) (None, 512) 131584
dense_20 (Dense) (None, 10) 5130
=================================================================
Total params: 923,402
Trainable params: 923,402
Non-trainable params: 0
_________________________________________________________________
# Save an architecture diagram, then compile and train model 2.
keras.utils.plot_model(model2, "CIFAR10.png", show_shapes=True)
model2.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# The wall-clock timer below wraps fit(), so it measures TRAINING time.
start_time = datetime.datetime.now()
history2=model2.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 9s 5ms/step - loss: 1.8559 - accuracy: 0.3230 - val_loss: 1.6979 - val_accuracy: 0.3863 Epoch 2/50 1563/1563 [==============================] - 9s 6ms/step - loss: 1.6749 - accuracy: 0.3987 - val_loss: 1.6392 - val_accuracy: 0.4030 Epoch 3/50 1563/1563 [==============================] - 8s 5ms/step - loss: 1.6078 - accuracy: 0.4207 - val_loss: 1.5730 - val_accuracy: 0.4373 Epoch 4/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5730 - accuracy: 0.4336 - val_loss: 1.5594 - val_accuracy: 0.4434 Epoch 5/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5390 - accuracy: 0.4477 - val_loss: 1.5397 - val_accuracy: 0.4489 Epoch 6/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5177 - accuracy: 0.4546 - val_loss: 1.5484 - val_accuracy: 0.4453 Epoch 7/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4981 - accuracy: 0.4631 - val_loss: 1.5578 - val_accuracy: 0.4514 Epoch 8/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4822 - accuracy: 0.4679 - val_loss: 1.5100 - val_accuracy: 0.4625 Epoch 9/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4679 - accuracy: 0.4722 - val_loss: 1.5022 - val_accuracy: 0.4667 Epoch 10/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4597 - accuracy: 0.4780 - val_loss: 1.4960 - val_accuracy: 0.4666 Epoch 11/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4410 - accuracy: 0.4822 - val_loss: 1.5900 - val_accuracy: 0.4291 Epoch 12/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4363 - accuracy: 0.4829 - val_loss: 1.5395 - val_accuracy: 0.4450 Epoch 13/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4259 - accuracy: 0.4865 - val_loss: 1.4927 - val_accuracy: 0.4612 Epoch 14/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4143 - accuracy: 0.4918 - val_loss: 1.5182 - 
val_accuracy: 0.4598 Epoch 15/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4081 - accuracy: 0.4962 - val_loss: 1.5335 - val_accuracy: 0.4564 Epoch 16/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.4009 - accuracy: 0.4971 - val_loss: 1.5164 - val_accuracy: 0.4608 Epoch 17/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3957 - accuracy: 0.4967 - val_loss: 1.5041 - val_accuracy: 0.4671 Epoch 18/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3858 - accuracy: 0.5013 - val_loss: 1.5027 - val_accuracy: 0.4766 Epoch 19/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3739 - accuracy: 0.5047 - val_loss: 1.4945 - val_accuracy: 0.4734 Epoch 20/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3750 - accuracy: 0.5065 - val_loss: 1.5234 - val_accuracy: 0.4626 Epoch 21/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3664 - accuracy: 0.5099 - val_loss: 1.4963 - val_accuracy: 0.4801 Epoch 22/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3581 - accuracy: 0.5112 - val_loss: 1.4918 - val_accuracy: 0.4823 Epoch 23/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3523 - accuracy: 0.5124 - val_loss: 1.5363 - val_accuracy: 0.4619 Epoch 24/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3505 - accuracy: 0.5150 - val_loss: 1.5017 - val_accuracy: 0.4753 Epoch 25/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3461 - accuracy: 0.5158 - val_loss: 1.5118 - val_accuracy: 0.4735 Epoch 26/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3404 - accuracy: 0.5175 - val_loss: 1.5106 - val_accuracy: 0.4744 Epoch 27/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3369 - accuracy: 0.5200 - val_loss: 1.5103 - val_accuracy: 0.4734 Epoch 28/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3327 - 
accuracy: 0.5202 - val_loss: 1.5177 - val_accuracy: 0.4677 Epoch 29/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3243 - accuracy: 0.5232 - val_loss: 1.5769 - val_accuracy: 0.4564 Epoch 30/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3302 - accuracy: 0.5212 - val_loss: 1.5762 - val_accuracy: 0.4615 Epoch 31/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3199 - accuracy: 0.5245 - val_loss: 1.5205 - val_accuracy: 0.4742 Epoch 32/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3180 - accuracy: 0.5248 - val_loss: 1.5478 - val_accuracy: 0.4742 Epoch 33/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3138 - accuracy: 0.5271 - val_loss: 1.5487 - val_accuracy: 0.4667 Epoch 34/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3058 - accuracy: 0.5294 - val_loss: 1.5389 - val_accuracy: 0.4761 Epoch 35/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3071 - accuracy: 0.5296 - val_loss: 1.5357 - val_accuracy: 0.4705 Epoch 36/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.3038 - accuracy: 0.5304 - val_loss: 1.5448 - val_accuracy: 0.4794 Epoch 37/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2978 - accuracy: 0.5338 - val_loss: 1.5787 - val_accuracy: 0.4602 Epoch 38/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2956 - accuracy: 0.5338 - val_loss: 1.5880 - val_accuracy: 0.4659 Epoch 39/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2960 - accuracy: 0.5300 - val_loss: 1.5723 - val_accuracy: 0.4675 Epoch 40/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2885 - accuracy: 0.5321 - val_loss: 1.5300 - val_accuracy: 0.4754 Epoch 41/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2839 - accuracy: 0.5372 - val_loss: 1.5837 - val_accuracy: 0.4676 Epoch 42/50 1563/1563 [==============================] 
- 6s 4ms/step - loss: 1.2884 - accuracy: 0.5348 - val_loss: 1.6061 - val_accuracy: 0.4671 Epoch 43/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2856 - accuracy: 0.5371 - val_loss: 1.5795 - val_accuracy: 0.4700 Epoch 44/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2785 - accuracy: 0.5395 - val_loss: 1.5481 - val_accuracy: 0.4768 Epoch 45/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2748 - accuracy: 0.5397 - val_loss: 1.5916 - val_accuracy: 0.4570 Epoch 46/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.2716 - accuracy: 0.5413 - val_loss: 1.5685 - val_accuracy: 0.4711 Epoch 47/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2650 - accuracy: 0.5411 - val_loss: 1.6104 - val_accuracy: 0.4632 Epoch 48/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2667 - accuracy: 0.5409 - val_loss: 1.5958 - val_accuracy: 0.4654 Epoch 49/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2639 - accuracy: 0.5451 - val_loss: 1.6240 - val_accuracy: 0.4559 Epoch 50/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.2640 - accuracy: 0.5418 - val_loss: 1.5858 - val_accuracy: 0.4732
# BUG FIX: the timer wraps model2.fit() (training), not prediction — the old
# message said "to predict". Variable name kept for backward compatibility.
time_to_predict2 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict2))
total elapsed time to predict: 0:05:23.342052
# Evaluate model 2 on the held-out test set (categorical loss, accuracy).
loss, accuracy = model2.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 3ms/step - loss: 1.5858 - accuracy: 0.4732 test set accuracy: 47.31999933719635
# Per-class probability predictions for the full test set -> (10000, 10).
preds2 = model2.predict(x_test)
print('shape of preds: ', preds2.shape)
313/313 [==============================] - 1s 2ms/step shape of preds: (10000, 10)
# Plotting Performance Metrics
# Inspect which metrics Keras recorded during training of model 2.
history_dict2 = history2.history
history_dict2.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Last five epochs of model 2's training history as a rounded table.
history_df2=pd.DataFrame(history_dict2)
history_df2.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 1.272 | 0.541 | 1.569 | 0.471 |
| 46 | 1.265 | 0.541 | 1.610 | 0.463 |
| 47 | 1.267 | 0.541 | 1.596 | 0.465 |
| 48 | 1.264 | 0.545 | 1.624 | 0.456 |
| 49 | 1.264 | 0.542 | 1.586 | 0.473 |
# Extract the individual metric curves for model 2 and plot all four
# (loss/accuracy, train/validation) on a single axis.
losses2 = history2.history['loss']
accs2 = history2.history['accuracy']
val_losses2 = history2.history['val_loss']
val_accs2 = history2.history['val_accuracy']
epochs2 = len(losses2)
pd.DataFrame(history2.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f66b47ae1d0>
# Confusion Matrix
# Collapse the (10000, 10) probability vectors to hard class ids.
pred2= model2.predict(x_test)
pred2=np.argmax(pred2, axis=1)
313/313 [==============================] - 1s 2ms/step
# Text metrics (report, accuracy, RMSE) for model 2.
print_validation_report(y_test, pred2)
Classification Report
precision recall f1-score support
0 0.49 0.61 0.54 1000
1 0.57 0.61 0.59 1000
2 0.37 0.29 0.32 1000
3 0.34 0.28 0.31 1000
4 0.40 0.37 0.39 1000
5 0.42 0.35 0.38 1000
6 0.49 0.50 0.50 1000
7 0.46 0.61 0.52 1000
8 0.63 0.56 0.59 1000
9 0.52 0.55 0.53 1000
accuracy 0.47 10000
macro avg 0.47 0.47 0.47 10000
weighted avg 0.47 0.47 0.47 10000
Accuracy Score: 0.4732
Root Mean Square Error: 3.185388516335174
# Heatmap of model 2's confusion matrix.
plot_confusion_matrix(y_test,pred2)
# Model 3: a small CNN — two conv/pool stages followed by a dense head.
model3 = Sequential()
model3.add(Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3), activation='relu'))
model3.add(MaxPool2D(pool_size=(2, 2)))
# FIX: dropped the redundant input_shape= argument that the original passed
# here — Keras only honours input_shape on the first layer of a Sequential
# model, so repeating it was dead and misleading.
model3.add(Conv2D(filters=256, kernel_size=(3, 3), activation='relu'))
model3.add(MaxPool2D(pool_size=(2, 2)))
model3.add(Flatten())
model3.add(Dense(384, activation='relu'))
model3.add(Dense(10, activation='softmax'))
model3.summary()
Model: "sequential_10"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_56 (Conv2D) (None, 30, 30, 128) 3584
max_pooling2d_31 (MaxPoolin (None, 15, 15, 128) 0
g2D)
conv2d_57 (Conv2D) (None, 13, 13, 256) 295168
max_pooling2d_32 (MaxPoolin (None, 6, 6, 256) 0
g2D)
flatten_10 (Flatten) (None, 9216) 0
dense_21 (Dense) (None, 384) 3539328
dense_22 (Dense) (None, 10) 3850
=================================================================
Total params: 3,841,930
Trainable params: 3,841,930
Non-trainable params: 0
_________________________________________________________________
# Save an architecture diagram, then compile and train model 3.
keras.utils.plot_model(model3, "CIFAR10.png", show_shapes=True)
model3.compile(loss='categorical_crossentropy',optimizer='adam',metrics=['accuracy'])
# The wall-clock timer below wraps fit(), so it measures TRAINING time.
start_time = datetime.datetime.now()
history3=model3.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 12s 7ms/step - loss: 1.3728 - accuracy: 0.5071 - val_loss: 1.1535 - val_accuracy: 0.5941 Epoch 2/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.9993 - accuracy: 0.6510 - val_loss: 1.0176 - val_accuracy: 0.6406 Epoch 3/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.8269 - accuracy: 0.7117 - val_loss: 0.9215 - val_accuracy: 0.6823 Epoch 4/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.6857 - accuracy: 0.7607 - val_loss: 0.9154 - val_accuracy: 0.6966 Epoch 5/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.5565 - accuracy: 0.8039 - val_loss: 0.9647 - val_accuracy: 0.6983 Epoch 6/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.4478 - accuracy: 0.8421 - val_loss: 0.9179 - val_accuracy: 0.7181 Epoch 7/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.3383 - accuracy: 0.8801 - val_loss: 1.0617 - val_accuracy: 0.7067 Epoch 8/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.2579 - accuracy: 0.9103 - val_loss: 1.1883 - val_accuracy: 0.6996 Epoch 9/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1999 - accuracy: 0.9309 - val_loss: 1.3537 - val_accuracy: 0.7020 Epoch 10/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1620 - accuracy: 0.9442 - val_loss: 1.4904 - val_accuracy: 0.6940 Epoch 11/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1398 - accuracy: 0.9514 - val_loss: 1.6887 - val_accuracy: 0.6867 Epoch 12/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1197 - accuracy: 0.9584 - val_loss: 1.8110 - val_accuracy: 0.6957 Epoch 13/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1042 - accuracy: 0.9634 - val_loss: 1.9473 - val_accuracy: 0.6817 Epoch 14/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.1021 - accuracy: 0.9655 - 
val_loss: 1.9706 - val_accuracy: 0.6981 Epoch 15/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0868 - accuracy: 0.9708 - val_loss: 2.1039 - val_accuracy: 0.6900 Epoch 16/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0852 - accuracy: 0.9706 - val_loss: 2.1289 - val_accuracy: 0.6966 Epoch 17/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0806 - accuracy: 0.9720 - val_loss: 2.2622 - val_accuracy: 0.6926 Epoch 18/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0793 - accuracy: 0.9730 - val_loss: 2.3113 - val_accuracy: 0.6961 Epoch 19/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0758 - accuracy: 0.9748 - val_loss: 2.3508 - val_accuracy: 0.6962 Epoch 20/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0742 - accuracy: 0.9759 - val_loss: 2.5101 - val_accuracy: 0.6917 Epoch 21/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0621 - accuracy: 0.9792 - val_loss: 2.6310 - val_accuracy: 0.6835 Epoch 22/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0657 - accuracy: 0.9785 - val_loss: 2.6603 - val_accuracy: 0.6953 Epoch 23/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0657 - accuracy: 0.9794 - val_loss: 2.7907 - val_accuracy: 0.6792 Epoch 24/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0679 - accuracy: 0.9786 - val_loss: 2.8263 - val_accuracy: 0.6876 Epoch 25/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0556 - accuracy: 0.9820 - val_loss: 2.8720 - val_accuracy: 0.6868 Epoch 26/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0590 - accuracy: 0.9811 - val_loss: 2.8426 - val_accuracy: 0.6917 Epoch 27/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0608 - accuracy: 0.9817 - val_loss: 2.7757 - val_accuracy: 0.6960 Epoch 28/50 1563/1563 [==============================] - 11s 
7ms/step - loss: 0.0591 - accuracy: 0.9821 - val_loss: 2.9526 - val_accuracy: 0.6838 Epoch 29/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0548 - accuracy: 0.9827 - val_loss: 3.0241 - val_accuracy: 0.6850 Epoch 30/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0597 - accuracy: 0.9815 - val_loss: 3.1762 - val_accuracy: 0.6922 Epoch 31/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0467 - accuracy: 0.9852 - val_loss: 3.3103 - val_accuracy: 0.6840 Epoch 32/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0605 - accuracy: 0.9821 - val_loss: 3.2034 - val_accuracy: 0.6923 Epoch 33/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0543 - accuracy: 0.9838 - val_loss: 3.2726 - val_accuracy: 0.6850 Epoch 34/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0493 - accuracy: 0.9857 - val_loss: 3.3957 - val_accuracy: 0.6843 Epoch 35/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0539 - accuracy: 0.9836 - val_loss: 3.4309 - val_accuracy: 0.6852 Epoch 36/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0542 - accuracy: 0.9836 - val_loss: 3.4875 - val_accuracy: 0.6883 Epoch 37/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0535 - accuracy: 0.9847 - val_loss: 3.3978 - val_accuracy: 0.6850 Epoch 38/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0450 - accuracy: 0.9865 - val_loss: 3.5987 - val_accuracy: 0.6933 Epoch 39/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0576 - accuracy: 0.9836 - val_loss: 3.5961 - val_accuracy: 0.6938 Epoch 40/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0404 - accuracy: 0.9881 - val_loss: 3.7712 - val_accuracy: 0.6877 Epoch 41/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0500 - accuracy: 0.9860 - val_loss: 3.7090 - val_accuracy: 0.6914 Epoch 42/50 
1563/1563 [==============================] - 11s 7ms/step - loss: 0.0477 - accuracy: 0.9864 - val_loss: 3.6900 - val_accuracy: 0.6860 Epoch 43/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0518 - accuracy: 0.9853 - val_loss: 3.6021 - val_accuracy: 0.6891 Epoch 44/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0419 - accuracy: 0.9876 - val_loss: 3.7690 - val_accuracy: 0.6879 Epoch 45/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0466 - accuracy: 0.9865 - val_loss: 3.8569 - val_accuracy: 0.6883 Epoch 46/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0431 - accuracy: 0.9876 - val_loss: 3.9237 - val_accuracy: 0.6854 Epoch 47/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0456 - accuracy: 0.9876 - val_loss: 3.9276 - val_accuracy: 0.6868 Epoch 48/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0452 - accuracy: 0.9869 - val_loss: 4.0238 - val_accuracy: 0.6911 Epoch 49/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0380 - accuracy: 0.9883 - val_loss: 4.1610 - val_accuracy: 0.6869 Epoch 50/50 1563/1563 [==============================] - 11s 7ms/step - loss: 0.0508 - accuracy: 0.9868 - val_loss: 4.2315 - val_accuracy: 0.6876
# NOTE(review): start_time/end_time presumably bracket model3.fit() (as they do
# for model4/model5 below), so this duration is the TRAINING time. The old
# message mislabelled it as prediction time; the label is corrected here.
time_to_predict3 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict3))
total elapsed time to predict: 0:09:23.486721
# Evaluate model3 on the held-out test set (one-hot labels) and report accuracy in percent.
loss, accuracy = model3.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 4ms/step - loss: 4.2315 - accuracy: 0.6876 test set accuracy: 68.76000165939331
# Predictions
# Class-probability matrix for the test set: one row per image, one column per class.
preds3 = model3.predict(x_test)
print('shape of preds: ', preds3.shape)
313/313 [==============================] - 1s 3ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# history3.history maps each tracked metric name to its per-epoch list of values.
history3_dict = history3.history
history3_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Tabulate the last five epochs of model3's training metrics, rounded to 3 decimals.
history3_df=pd.DataFrame(history3_dict)
history3_df.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 0.043 | 0.988 | 3.924 | 0.685 |
| 46 | 0.046 | 0.988 | 3.928 | 0.687 |
| 47 | 0.045 | 0.987 | 4.024 | 0.691 |
| 48 | 0.038 | 0.988 | 4.161 | 0.687 |
| 49 | 0.051 | 0.987 | 4.231 | 0.688 |
# Pull model3's per-epoch metric curves out of the fit() history and chart
# all four (loss/accuracy, train/validation) on a single axis.
hist3 = history3.history
losses3, accs3 = hist3['loss'], hist3['accuracy']
val_losses3, val_accs3 = hist3['val_loss'], hist3['val_accuracy']
epochs3 = len(losses3)
pd.DataFrame(hist3).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f665a099550>
# Confusion Matrix
# Collapse model3's class-probability rows into hard label predictions.
pred3 = np.argmax(model3.predict(x_test), axis=1)
313/313 [==============================] - 1s 3ms/step
print_validation_report(y_test, pred3)
Classification Report
precision recall f1-score support
0 0.71 0.74 0.73 1000
1 0.86 0.76 0.80 1000
2 0.57 0.62 0.60 1000
3 0.50 0.53 0.51 1000
4 0.62 0.64 0.63 1000
5 0.63 0.52 0.57 1000
6 0.72 0.79 0.75 1000
7 0.77 0.72 0.75 1000
8 0.73 0.85 0.78 1000
9 0.83 0.70 0.76 1000
accuracy 0.69 10000
macro avg 0.69 0.69 0.69 10000
weighted avg 0.69 0.69 0.69 10000
Accuracy Score: 0.6876
Root Mean Square Error: 2.2981296743221433
# Confusion-matrix heatmap for model3.
plot_confusion_matrix(y_test,pred3)
# Reload model3 from its saved checkpoint and regenerate test-set probabilities.
# NOTE(review): this replaces the in-memory model3 with whatever state was
# saved in 'CNN_model3.h5' — verify the checkpoint is the intended one.
model3 = tf.keras.models.load_model('CNN_model3.h5')
preds3 = model3.predict(x_test)
313/313 [==============================] - 1s 3ms/step
preds3.shape
(10000, 10)
# Predictions
# Render the first 20 prediction rows as a colour-graded percentage table,
# one column per CIFAR-10 class.
class_columns = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                 'dog', 'frog', 'horse', 'ship', 'truck']
cm = sns.light_palette((260, 75, 60), input="husl", as_cmap=True)
df3 = pd.DataFrame(preds3[:20], columns=class_columns)
df3.style.format("{:.2%}").background_gradient(cmap=cm)
| airplane | automobile | bird | cat | deer | dog | frog | horse | ship | truck | |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.01% | 0.06% | 0.01% | 95.61% | 0.01% | 0.78% | 0.38% | 0.02% | 2.98% | 0.15% |
| 1 | 0.00% | 0.01% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.96% | 0.02% |
| 2 | 1.56% | 0.91% | 0.00% | 0.01% | 0.00% | 0.00% | 0.00% | 0.03% | 96.50% | 0.98% |
| 3 | 98.43% | 0.43% | 0.04% | 0.07% | 0.02% | 0.00% | 0.00% | 0.06% | 0.85% | 0.11% |
| 4 | 0.00% | 0.00% | 0.09% | 0.25% | 88.18% | 0.01% | 11.48% | 0.00% | 0.00% | 0.00% |
| 5 | 0.00% | 0.00% | 5.85% | 0.18% | 0.10% | 1.82% | 92.05% | 0.00% | 0.00% | 0.00% |
| 6 | 0.00% | 99.89% | 0.00% | 0.08% | 0.00% | 0.02% | 0.01% | 0.00% | 0.00% | 0.00% |
| 7 | 0.22% | 0.00% | 7.78% | 0.59% | 0.09% | 0.60% | 90.46% | 0.04% | 0.00% | 0.21% |
| 8 | 0.01% | 0.00% | 0.19% | 94.93% | 2.15% | 1.93% | 0.27% | 0.52% | 0.00% | 0.00% |
| 9 | 0.00% | 80.11% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.01% | 19.88% |
| 10 | 41.75% | 0.01% | 1.07% | 2.33% | 47.06% | 1.42% | 0.02% | 1.10% | 5.12% | 0.12% |
| 11 | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 100.00% |
| 12 | 0.01% | 0.01% | 0.35% | 2.08% | 1.53% | 95.72% | 0.25% | 0.02% | 0.04% | 0.00% |
| 13 | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 100.00% | 0.00% | 0.00% |
| 14 | 0.00% | 0.06% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.94% |
| 15 | 0.00% | 0.02% | 0.00% | 0.01% | 0.01% | 0.00% | 2.15% | 0.00% | 97.81% | 0.00% |
| 16 | 0.00% | 0.00% | 0.00% | 0.42% | 0.00% | 99.56% | 0.00% | 0.01% | 0.00% | 0.00% |
| 17 | 0.01% | 0.00% | 3.99% | 3.62% | 0.38% | 2.24% | 1.44% | 87.79% | 0.02% | 0.51% |
| 18 | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.99% | 0.00% |
| 19 | 0.00% | 0.00% | 0.00% | 0.00% | 0.21% | 0.07% | 99.72% | 0.00% | 0.00% | 0.00% |
# Grab one CIFAR-10 test image and shape it into a (1, 32, 32, 3) batch so it
# can be pushed through the network for activation visualisation.
(_, _), (test_images, test_labels) = tf.keras.datasets.cifar10.load_data()
img = test_images[2004]
img_tensor = image.img_to_array(img)[np.newaxis, ...]
class_names = [
    'airplane', 'automobile', 'bird', 'cat', 'deer',
    'dog', 'frog', 'horse', 'ship', 'truck',
]
# Show the raw image being probed.
plt.imshow(img, cmap='viridis')
plt.axis('off')
plt.show()
# Extracts the outputs of the top 8 layers:
# (model3 has only 7 layers, so the [:8] slice simply takes all of them)
layer_outputs3 = [layer.output for layer in model3.layers[:8]]
# Creates a model that will return these outputs, given the model input:
activation_model3 = models.Model(inputs=model3.input, outputs=layer_outputs3)
# One activation array per layer for the single probe image.
activations3 = activation_model3.predict(img_tensor)
len(activations3)
1/1 [==============================] - 0s 87ms/step
7
# Collect every layer name in model3 (used to label the activation plots).
layer_names3 = [layer.name for layer in model3.layers]
layer_names3
['conv2d_25', 'max_pooling2d_25', 'conv2d_26', 'max_pooling2d_26', 'flatten_11', 'dense_22', 'dense_23']
# These are the names of the layers, so can have them as part of our plot.
# Only the first three layers are visualised; zip() below stops once
# layer_names3 is exhausted, so the remaining activations are skipped.
layer_names3 = [layer.name for layer in model3.layers[:3]]
images_per_row = 16
# Now let's display our feature maps
for layer_name, layer_activation3 in zip(layer_names3, activations3):
    # This is the number of features in the feature map;
    # the feature map has shape (1, size, size, n_features).
    n_features = layer_activation3.shape[-1]
    size = layer_activation3.shape[1]
    # We will tile the activation channels in this matrix,
    # images_per_row channels per grid row.
    n_cols = n_features // images_per_row
    display_grid = np.zeros((size * n_cols, images_per_row * size))
    # Tile each filter into this big horizontal grid.
    for col in range(n_cols):
        for row in range(images_per_row):
            channel_image = layer_activation3[0, :, :, col * images_per_row + row]
            # Post-process the feature to make it visually palatable:
            # shift to mean 128 / std 64, then clip to the uint8 range.
            # Work on a fresh array (the original code's in-place -=, /= ops
            # mutated the slice view and therefore activations3 itself).
            channel_image = channel_image - channel_image.mean()
            std = channel_image.std()
            # Guard against constant (zero-variance) channels, which previously
            # triggered "invalid value encountered in true_divide" and NaN tiles.
            if std > 0:
                channel_image = channel_image / std
            channel_image = channel_image * 64 + 128
            channel_image = np.clip(channel_image, 0, 255).astype('uint8')
            display_grid[col * size : (col + 1) * size,
                         row * size : (row + 1) * size] = channel_image
    # Display the grid, sized so each activation pixel gets the same area.
    scale = 1. / size
    plt.figure(figsize=(scale * display_grid.shape[1],
                        scale * display_grid.shape[0]))
    plt.title(layer_name)
    plt.grid(False)
    plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show();
/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:28: RuntimeWarning: invalid value encountered in true_divide
# model4: three Conv/MaxPool stages (128 -> 256 -> 512 filters) followed by a
# 384-unit dense head and a 10-way softmax classifier for CIFAR-10.
model4 = Sequential()
# Only the first layer needs input_shape; the redundant input_shape arguments
# previously passed to the later Conv2D layers were silently ignored by Keras
# and have been removed to avoid misleading readers.
model4.add(Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3), activation='relu'))
model4.add(MaxPool2D(pool_size=(2, 2)))
model4.add(Conv2D(filters=256, kernel_size=(3, 3), activation='relu'))
model4.add(MaxPool2D(pool_size=(2, 2)))
model4.add(Conv2D(filters=512, kernel_size=(3, 3), activation='relu'))
model4.add(MaxPool2D(pool_size=(2, 2)))
model4.add(Flatten())
model4.add(Dense(384, activation='relu'))
model4.add(Dense(10, activation='softmax'))
model4.summary()
Model: "sequential_11"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_58 (Conv2D) (None, 30, 30, 128) 3584
max_pooling2d_33 (MaxPoolin (None, 15, 15, 128) 0
g2D)
conv2d_59 (Conv2D) (None, 13, 13, 256) 295168
max_pooling2d_34 (MaxPoolin (None, 6, 6, 256) 0
g2D)
conv2d_60 (Conv2D) (None, 4, 4, 512) 1180160
max_pooling2d_35 (MaxPoolin (None, 2, 2, 512) 0
g2D)
flatten_11 (Flatten) (None, 2048) 0
dense_23 (Dense) (None, 384) 786816
dense_24 (Dense) (None, 10) 3850
=================================================================
Total params: 2,269,578
Trainable params: 2,269,578
Non-trainable params: 0
_________________________________________________________________
# Render the model4 architecture diagram to CIFAR10.png.
keras.utils.plot_model(model4, "CIFAR10.png", show_shapes=True)
model4.compile(loss='categorical_crossentropy',optimizer='adam',metrics=['accuracy'])
# Time the 50-epoch training run; the test set doubles as validation data here.
start_time = datetime.datetime.now()
history4=model4.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.3798 - accuracy: 0.5012 - val_loss: 1.0840 - val_accuracy: 0.6148 Epoch 2/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.9482 - accuracy: 0.6670 - val_loss: 0.8709 - val_accuracy: 0.7005 Epoch 3/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.7554 - accuracy: 0.7375 - val_loss: 0.8693 - val_accuracy: 0.7006 Epoch 4/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.6142 - accuracy: 0.7858 - val_loss: 0.7907 - val_accuracy: 0.7316 Epoch 5/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.4935 - accuracy: 0.8273 - val_loss: 0.8071 - val_accuracy: 0.7358 Epoch 6/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.3927 - accuracy: 0.8617 - val_loss: 0.8734 - val_accuracy: 0.7354 Epoch 7/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.3090 - accuracy: 0.8909 - val_loss: 0.9005 - val_accuracy: 0.7457 Epoch 8/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.2438 - accuracy: 0.9131 - val_loss: 1.0167 - val_accuracy: 0.7295 Epoch 9/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1990 - accuracy: 0.9303 - val_loss: 1.1687 - val_accuracy: 0.7311 Epoch 10/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1701 - accuracy: 0.9411 - val_loss: 1.3130 - val_accuracy: 0.7196 Epoch 11/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1433 - accuracy: 0.9504 - val_loss: 1.3364 - val_accuracy: 0.7189 Epoch 12/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1401 - accuracy: 0.9516 - val_loss: 1.3653 - val_accuracy: 0.7346 Epoch 13/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1230 - accuracy: 0.9585 - val_loss: 1.4539 - val_accuracy: 0.7338 Epoch 14/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1252 - accuracy: 0.9579 - 
val_loss: 1.4695 - val_accuracy: 0.7167 Epoch 15/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1106 - accuracy: 0.9627 - val_loss: 1.5513 - val_accuracy: 0.7194 Epoch 16/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1075 - accuracy: 0.9640 - val_loss: 1.7596 - val_accuracy: 0.7251 Epoch 17/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1037 - accuracy: 0.9643 - val_loss: 1.6435 - val_accuracy: 0.7334 Epoch 18/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.1003 - accuracy: 0.9663 - val_loss: 1.7434 - val_accuracy: 0.7172 Epoch 19/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0932 - accuracy: 0.9688 - val_loss: 1.9061 - val_accuracy: 0.7229 Epoch 20/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0865 - accuracy: 0.9717 - val_loss: 1.7964 - val_accuracy: 0.7290 Epoch 21/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0875 - accuracy: 0.9707 - val_loss: 1.9786 - val_accuracy: 0.7294 Epoch 22/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0922 - accuracy: 0.9704 - val_loss: 1.8723 - val_accuracy: 0.7277 Epoch 23/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0841 - accuracy: 0.9730 - val_loss: 1.9559 - val_accuracy: 0.7285 Epoch 24/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0822 - accuracy: 0.9742 - val_loss: 2.2104 - val_accuracy: 0.7210 Epoch 25/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0881 - accuracy: 0.9735 - val_loss: 2.0452 - val_accuracy: 0.7269 Epoch 26/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0844 - accuracy: 0.9737 - val_loss: 2.1023 - val_accuracy: 0.7333 Epoch 27/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0823 - accuracy: 0.9757 - val_loss: 2.0971 - val_accuracy: 0.7234 Epoch 28/50 1563/1563 [==============================] - 13s 
8ms/step - loss: 0.0784 - accuracy: 0.9750 - val_loss: 2.2039 - val_accuracy: 0.7301 Epoch 29/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.0729 - accuracy: 0.9775 - val_loss: 2.3163 - val_accuracy: 0.7280 Epoch 30/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0827 - accuracy: 0.9752 - val_loss: 2.1427 - val_accuracy: 0.7248 Epoch 31/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0710 - accuracy: 0.9780 - val_loss: 2.3146 - val_accuracy: 0.7334 Epoch 32/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0737 - accuracy: 0.9780 - val_loss: 2.3479 - val_accuracy: 0.7302 Epoch 33/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0710 - accuracy: 0.9788 - val_loss: 2.2817 - val_accuracy: 0.7241 Epoch 34/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.0758 - accuracy: 0.9776 - val_loss: 2.4949 - val_accuracy: 0.7222 Epoch 35/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0712 - accuracy: 0.9793 - val_loss: 2.2759 - val_accuracy: 0.7247 Epoch 36/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0700 - accuracy: 0.9791 - val_loss: 2.4344 - val_accuracy: 0.7249 Epoch 37/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0672 - accuracy: 0.9805 - val_loss: 2.5139 - val_accuracy: 0.7227 Epoch 38/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0641 - accuracy: 0.9811 - val_loss: 2.3936 - val_accuracy: 0.7317 Epoch 39/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0676 - accuracy: 0.9801 - val_loss: 2.4543 - val_accuracy: 0.7252 Epoch 40/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0645 - accuracy: 0.9816 - val_loss: 2.6076 - val_accuracy: 0.7317 Epoch 41/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0652 - accuracy: 0.9811 - val_loss: 2.8564 - val_accuracy: 0.7263 Epoch 42/50 
1563/1563 [==============================] - 13s 8ms/step - loss: 0.0702 - accuracy: 0.9802 - val_loss: 2.7755 - val_accuracy: 0.7237 Epoch 43/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.0703 - accuracy: 0.9800 - val_loss: 2.7498 - val_accuracy: 0.7244 Epoch 44/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.0598 - accuracy: 0.9828 - val_loss: 2.7608 - val_accuracy: 0.7271 Epoch 45/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0602 - accuracy: 0.9828 - val_loss: 2.7102 - val_accuracy: 0.7291 Epoch 46/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0597 - accuracy: 0.9837 - val_loss: 2.8806 - val_accuracy: 0.7310 Epoch 47/50 1563/1563 [==============================] - 13s 9ms/step - loss: 0.0601 - accuracy: 0.9838 - val_loss: 2.9033 - val_accuracy: 0.7228 Epoch 48/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0683 - accuracy: 0.9821 - val_loss: 2.9146 - val_accuracy: 0.7343 Epoch 49/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0683 - accuracy: 0.9827 - val_loss: 3.0836 - val_accuracy: 0.7176 Epoch 50/50 1563/1563 [==============================] - 13s 8ms/step - loss: 0.0594 - accuracy: 0.9842 - val_loss: 2.9517 - val_accuracy: 0.7260
# start_time/end_time bracket model4.fit() above, so this duration is the
# TRAINING time. The old message mislabelled it as prediction time; fixed.
time_to_predict4 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict4))
total elapsed time to predict: 0:11:23.308001
# Evaluate model4 on the held-out test set and report accuracy in percent.
loss, accuracy = model4.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 2s 5ms/step - loss: 2.9517 - accuracy: 0.7260 test set accuracy: 72.60000109672546
# Predictions
# Class-probability matrix for the test set (10000 rows x 10 classes).
preds4 = model4.predict(x_test)
print('shape of preds: ', preds4.shape)
313/313 [==============================] - 1s 3ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# Per-epoch metric curves recorded by fit().
history4_dict = history4.history
history4_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Last five epochs of model4's training metrics, rounded for display.
history4_df=pd.DataFrame(history4_dict)
history4_df.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 0.060 | 0.984 | 2.881 | 0.731 |
| 46 | 0.060 | 0.984 | 2.903 | 0.723 |
| 47 | 0.068 | 0.982 | 2.915 | 0.734 |
| 48 | 0.068 | 0.983 | 3.084 | 0.718 |
| 49 | 0.059 | 0.984 | 2.952 | 0.726 |
# Extract model4's per-epoch curves from the fit() history and plot them
# together on one axis.
hist4 = history4.history
losses4, accs4 = hist4['loss'], hist4['accuracy']
val_losses4, val_accs4 = hist4['val_loss'], hist4['val_accuracy']
epochs4 = len(losses4)
pd.DataFrame(hist4).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f665a0a0210>
# Confusion Matrix
# Collapse model4's class probabilities into hard label predictions.
pred4 = np.argmax(model4.predict(x_test), axis=1)
313/313 [==============================] - 1s 3ms/step
print_validation_report(y_test, pred4)
Classification Report
precision recall f1-score support
0 0.75 0.78 0.76 1000
1 0.89 0.82 0.86 1000
2 0.59 0.65 0.62 1000
3 0.56 0.52 0.54 1000
4 0.68 0.68 0.68 1000
5 0.62 0.62 0.62 1000
6 0.78 0.78 0.78 1000
7 0.78 0.76 0.77 1000
8 0.79 0.85 0.82 1000
9 0.82 0.79 0.81 1000
accuracy 0.73 10000
macro avg 0.73 0.73 0.73 10000
weighted avg 0.73 0.73 0.73 10000
Accuracy Score: 0.726
Root Mean Square Error: 2.1103554203024664
plot_confusion_matrix(y_test,pred4)
# model5: a plain fully-connected baseline — flatten the 32x32x3 image, pass
# it through one L2-regularised 256-unit hidden layer, then a 10-way softmax.
model5 = models.Sequential([
    keras.layers.Flatten(input_shape=[32, 32, 3]),
    layers.Dense(units=256, activation=tf.nn.relu,
                 kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    layers.Dense(units=10, activation=tf.nn.softmax),
])
model5.summary()
Model: "sequential_12"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
flatten_12 (Flatten) (None, 3072) 0
dense_25 (Dense) (None, 256) 786688
dense_26 (Dense) (None, 10) 2570
=================================================================
Total params: 789,258
Trainable params: 789,258
Non-trainable params: 0
_________________________________________________________________
# Render the model5 architecture diagram to CIFAR10.png.
keras.utils.plot_model(model5, "CIFAR10.png", show_shapes=True)
model5.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Time the 50-epoch training run; the test set doubles as validation data here.
start_time = datetime.datetime.now()
history5=model5.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 7s 4ms/step - loss: 2.0438 - accuracy: 0.3197 - val_loss: 1.8925 - val_accuracy: 0.3451 Epoch 2/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.8099 - accuracy: 0.3773 - val_loss: 1.8267 - val_accuracy: 0.3668 Epoch 3/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7766 - accuracy: 0.3871 - val_loss: 1.7390 - val_accuracy: 0.4034 Epoch 4/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7529 - accuracy: 0.4005 - val_loss: 1.7219 - val_accuracy: 0.4187 Epoch 5/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7347 - accuracy: 0.4095 - val_loss: 1.7188 - val_accuracy: 0.4166 Epoch 6/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7244 - accuracy: 0.4118 - val_loss: 1.6876 - val_accuracy: 0.4308 Epoch 7/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7139 - accuracy: 0.4163 - val_loss: 1.7102 - val_accuracy: 0.4221 Epoch 8/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7093 - accuracy: 0.4174 - val_loss: 1.8146 - val_accuracy: 0.3989 Epoch 9/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7018 - accuracy: 0.4224 - val_loss: 1.6852 - val_accuracy: 0.4306 Epoch 10/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7011 - accuracy: 0.4230 - val_loss: 1.6886 - val_accuracy: 0.4269 Epoch 11/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6931 - accuracy: 0.4267 - val_loss: 1.7354 - val_accuracy: 0.4020 Epoch 12/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6939 - accuracy: 0.4238 - val_loss: 1.7781 - val_accuracy: 0.3911 Epoch 13/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6935 - accuracy: 0.4230 - val_loss: 1.7524 - val_accuracy: 0.4032 Epoch 14/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6853 - accuracy: 0.4310 - val_loss: 1.7443 - 
val_accuracy: 0.4043 Epoch 15/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6891 - accuracy: 0.4271 - val_loss: 1.6847 - val_accuracy: 0.4293 Epoch 16/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6834 - accuracy: 0.4273 - val_loss: 1.7259 - val_accuracy: 0.4110 Epoch 17/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6931 - accuracy: 0.4277 - val_loss: 1.7736 - val_accuracy: 0.4105 Epoch 18/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6861 - accuracy: 0.4278 - val_loss: 1.7110 - val_accuracy: 0.4249 Epoch 19/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6994 - accuracy: 0.4245 - val_loss: 1.7158 - val_accuracy: 0.4109 Epoch 20/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6882 - accuracy: 0.4272 - val_loss: 1.6418 - val_accuracy: 0.4445 Epoch 21/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6848 - accuracy: 0.4274 - val_loss: 1.6740 - val_accuracy: 0.4298 Epoch 22/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6885 - accuracy: 0.4298 - val_loss: 1.7050 - val_accuracy: 0.4247 Epoch 23/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6871 - accuracy: 0.4286 - val_loss: 1.6443 - val_accuracy: 0.4460 Epoch 24/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6866 - accuracy: 0.4273 - val_loss: 1.6660 - val_accuracy: 0.4345 Epoch 25/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6801 - accuracy: 0.4325 - val_loss: 1.6985 - val_accuracy: 0.4254 Epoch 26/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6781 - accuracy: 0.4301 - val_loss: 1.6507 - val_accuracy: 0.4360 Epoch 27/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6914 - accuracy: 0.4289 - val_loss: 1.6707 - val_accuracy: 0.4303 Epoch 28/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6890 - 
accuracy: 0.4261 - val_loss: 1.6866 - val_accuracy: 0.4285 Epoch 29/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6774 - accuracy: 0.4322 - val_loss: 1.6822 - val_accuracy: 0.4296 Epoch 30/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6863 - accuracy: 0.4279 - val_loss: 1.7126 - val_accuracy: 0.4185 Epoch 31/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6835 - accuracy: 0.4284 - val_loss: 1.6657 - val_accuracy: 0.4450 Epoch 32/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6834 - accuracy: 0.4297 - val_loss: 1.7038 - val_accuracy: 0.4223 Epoch 33/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6915 - accuracy: 0.4267 - val_loss: 1.6617 - val_accuracy: 0.4353 Epoch 34/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6940 - accuracy: 0.4267 - val_loss: 1.7711 - val_accuracy: 0.3965 Epoch 35/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7029 - accuracy: 0.4211 - val_loss: 1.6931 - val_accuracy: 0.4223 Epoch 36/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6933 - accuracy: 0.4222 - val_loss: 1.6959 - val_accuracy: 0.4169 Epoch 37/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.7018 - accuracy: 0.4191 - val_loss: 1.6807 - val_accuracy: 0.4256 Epoch 38/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6921 - accuracy: 0.4236 - val_loss: 1.6734 - val_accuracy: 0.4308 Epoch 39/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6971 - accuracy: 0.4220 - val_loss: 1.7261 - val_accuracy: 0.4134 Epoch 40/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6882 - accuracy: 0.4237 - val_loss: 1.7153 - val_accuracy: 0.4092 Epoch 41/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6900 - accuracy: 0.4235 - val_loss: 1.6797 - val_accuracy: 0.4251 Epoch 42/50 1563/1563 [==============================] 
- 6s 4ms/step - loss: 1.6932 - accuracy: 0.4227 - val_loss: 1.6857 - val_accuracy: 0.4211 Epoch 43/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6911 - accuracy: 0.4233 - val_loss: 1.6997 - val_accuracy: 0.4154 Epoch 44/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6912 - accuracy: 0.4218 - val_loss: 1.6785 - val_accuracy: 0.4215 Epoch 45/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6872 - accuracy: 0.4218 - val_loss: 1.6861 - val_accuracy: 0.4130 Epoch 46/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6919 - accuracy: 0.4210 - val_loss: 1.6377 - val_accuracy: 0.4444 Epoch 47/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6886 - accuracy: 0.4240 - val_loss: 1.6878 - val_accuracy: 0.4242 Epoch 48/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6906 - accuracy: 0.4254 - val_loss: 1.7012 - val_accuracy: 0.4191 Epoch 49/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6916 - accuracy: 0.4219 - val_loss: 1.7128 - val_accuracy: 0.4048 Epoch 50/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6928 - accuracy: 0.4246 - val_loss: 1.6779 - val_accuracy: 0.4341
# start_time/end_time bracket model5.fit() above, so this duration is the
# TRAINING time. The old message mislabelled it as prediction time; fixed.
time_to_predict5 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict5))
total elapsed time to predict: 0:05:23.185763
# Evaluate model5 on the held-out test set and report accuracy in percent.
loss, accuracy = model5.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 3ms/step - loss: 1.6779 - accuracy: 0.4341 test set accuracy: 43.41000020503998
# Class-probability matrix for the test set (10000 rows x 10 classes).
preds5 = model5.predict(x_test)
print('shape of preds: ', preds5.shape)
313/313 [==============================] - 1s 2ms/step shape of preds: (10000, 10)
# Plotting Performance Metrics
# Per-epoch metric curves recorded by fit().
history_dict5 = history5.history
history_dict5.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Last five epochs of model5's training metrics, rounded for display.
history_df5=pd.DataFrame(history_dict5)
history_df5.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 1.692 | 0.421 | 1.638 | 0.444 |
| 46 | 1.689 | 0.424 | 1.688 | 0.424 |
| 47 | 1.691 | 0.425 | 1.701 | 0.419 |
| 48 | 1.692 | 0.422 | 1.713 | 0.405 |
| 49 | 1.693 | 0.425 | 1.678 | 0.434 |
# Extract model5's per-epoch curves from the fit() history and plot them
# together on one axis.
hist5 = history5.history
losses5, accs5 = hist5['loss'], hist5['accuracy']
val_losses5, val_accs5 = hist5['val_loss'], hist5['val_accuracy']
epochs5 = len(losses5)
pd.DataFrame(hist5).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f665a4d3d90>
# Confusion Matrix
# Collapse model5's class probabilities into hard label predictions.
pred5 = np.argmax(model5.predict(x_test), axis=1)
313/313 [==============================] - 1s 2ms/step
plot_confusion_matrix(y_test,pred5)
# model6: the model5 baseline extended with a second (512-unit) hidden layer;
# the first hidden layer keeps its L2 weight regularisation.
model6 = models.Sequential([
    keras.layers.Flatten(input_shape=[32, 32, 3]),
    layers.Dense(units=256, activation=tf.nn.relu,
                 kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    layers.Dense(units=512, activation=tf.nn.relu),
    layers.Dense(units=10, activation=tf.nn.softmax),
])
model6.summary()
Model: "sequential_13"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
flatten_13 (Flatten) (None, 3072) 0
dense_27 (Dense) (None, 256) 786688
dense_28 (Dense) (None, 512) 131584
dense_29 (Dense) (None, 10) 5130
=================================================================
Total params: 923,402
Trainable params: 923,402
Non-trainable params: 0
_________________________________________________________________
# Render the model6 architecture diagram to CIFAR10.png.
keras.utils.plot_model(model6, "CIFAR10.png", show_shapes=True)
model6.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Time the 50-epoch training run; the test set doubles as validation data here.
start_time = datetime.datetime.now()
history6=model6.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.9618 - accuracy: 0.3201 - val_loss: 1.7824 - val_accuracy: 0.3753 Epoch 2/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.7560 - accuracy: 0.3840 - val_loss: 1.7026 - val_accuracy: 0.4036 Epoch 3/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.7111 - accuracy: 0.4012 - val_loss: 1.6598 - val_accuracy: 0.4215 Epoch 4/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.6812 - accuracy: 0.4177 - val_loss: 1.6942 - val_accuracy: 0.4152 Epoch 5/50 1563/1563 [==============================] - 8s 5ms/step - loss: 1.6515 - accuracy: 0.4272 - val_loss: 1.6705 - val_accuracy: 0.4185 Epoch 6/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.6342 - accuracy: 0.4353 - val_loss: 1.6625 - val_accuracy: 0.4223 Epoch 7/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.6243 - accuracy: 0.4376 - val_loss: 1.6382 - val_accuracy: 0.4388 Epoch 8/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.6197 - accuracy: 0.4403 - val_loss: 1.6041 - val_accuracy: 0.4511 Epoch 9/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.6073 - accuracy: 0.4438 - val_loss: 1.6356 - val_accuracy: 0.4291 Epoch 10/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5993 - accuracy: 0.4486 - val_loss: 1.5729 - val_accuracy: 0.4585 Epoch 11/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5994 - accuracy: 0.4467 - val_loss: 1.6182 - val_accuracy: 0.4444 Epoch 12/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5954 - accuracy: 0.4512 - val_loss: 1.5724 - val_accuracy: 0.4640 Epoch 13/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5862 - accuracy: 0.4527 - val_loss: 1.6058 - val_accuracy: 0.4484 Epoch 14/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5816 - accuracy: 0.4541 - val_loss: 1.6643 - 
val_accuracy: 0.4339 Epoch 15/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5796 - accuracy: 0.4565 - val_loss: 1.6294 - val_accuracy: 0.4433 Epoch 16/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5798 - accuracy: 0.4554 - val_loss: 1.5986 - val_accuracy: 0.4605 Epoch 17/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5764 - accuracy: 0.4566 - val_loss: 1.6071 - val_accuracy: 0.4465 Epoch 18/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5726 - accuracy: 0.4587 - val_loss: 1.6080 - val_accuracy: 0.4421 Epoch 19/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5662 - accuracy: 0.4614 - val_loss: 1.6090 - val_accuracy: 0.4498 Epoch 20/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5679 - accuracy: 0.4601 - val_loss: 1.6209 - val_accuracy: 0.4466 Epoch 21/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5608 - accuracy: 0.4625 - val_loss: 1.6657 - val_accuracy: 0.4327 Epoch 22/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5649 - accuracy: 0.4626 - val_loss: 1.6560 - val_accuracy: 0.4371 Epoch 23/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5579 - accuracy: 0.4673 - val_loss: 1.5952 - val_accuracy: 0.4521 Epoch 24/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5645 - accuracy: 0.4620 - val_loss: 1.6054 - val_accuracy: 0.4476 Epoch 25/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5543 - accuracy: 0.4659 - val_loss: 1.6297 - val_accuracy: 0.4443 Epoch 26/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5590 - accuracy: 0.4647 - val_loss: 1.6501 - val_accuracy: 0.4360 Epoch 27/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5575 - accuracy: 0.4665 - val_loss: 1.6991 - val_accuracy: 0.4223 Epoch 28/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5520 - 
accuracy: 0.4650 - val_loss: 1.6310 - val_accuracy: 0.4436 Epoch 29/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5494 - accuracy: 0.4697 - val_loss: 1.5702 - val_accuracy: 0.4664 Epoch 30/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5504 - accuracy: 0.4670 - val_loss: 1.6410 - val_accuracy: 0.4398 Epoch 31/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5465 - accuracy: 0.4684 - val_loss: 1.6668 - val_accuracy: 0.4364 Epoch 32/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5467 - accuracy: 0.4675 - val_loss: 1.5830 - val_accuracy: 0.4605 Epoch 33/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5545 - accuracy: 0.4665 - val_loss: 1.6068 - val_accuracy: 0.4500 Epoch 34/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5500 - accuracy: 0.4692 - val_loss: 1.5992 - val_accuracy: 0.4602 Epoch 35/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5527 - accuracy: 0.4685 - val_loss: 1.5877 - val_accuracy: 0.4614 Epoch 36/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5504 - accuracy: 0.4660 - val_loss: 1.7039 - val_accuracy: 0.4229 Epoch 37/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5395 - accuracy: 0.4746 - val_loss: 1.7030 - val_accuracy: 0.4163 Epoch 38/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5453 - accuracy: 0.4691 - val_loss: 1.6855 - val_accuracy: 0.4337 Epoch 39/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5397 - accuracy: 0.4720 - val_loss: 1.6490 - val_accuracy: 0.4488 Epoch 40/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5375 - accuracy: 0.4730 - val_loss: 1.6199 - val_accuracy: 0.4461 Epoch 41/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5447 - accuracy: 0.4715 - val_loss: 1.6120 - val_accuracy: 0.4580 Epoch 42/50 1563/1563 [==============================] 
- 7s 4ms/step - loss: 1.5420 - accuracy: 0.4715 - val_loss: 1.6020 - val_accuracy: 0.4570 Epoch 43/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5407 - accuracy: 0.4720 - val_loss: 1.6009 - val_accuracy: 0.4540 Epoch 44/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5312 - accuracy: 0.4744 - val_loss: 1.6885 - val_accuracy: 0.4288 Epoch 45/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5387 - accuracy: 0.4715 - val_loss: 1.6210 - val_accuracy: 0.4539 Epoch 46/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5330 - accuracy: 0.4741 - val_loss: 1.6167 - val_accuracy: 0.4509 Epoch 47/50 1563/1563 [==============================] - 7s 4ms/step - loss: 1.5378 - accuracy: 0.4740 - val_loss: 1.6112 - val_accuracy: 0.4546 Epoch 48/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5365 - accuracy: 0.4741 - val_loss: 1.6579 - val_accuracy: 0.4364 Epoch 49/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5330 - accuracy: 0.4770 - val_loss: 1.6449 - val_accuracy: 0.4392 Epoch 50/50 1563/1563 [==============================] - 6s 4ms/step - loss: 1.5344 - accuracy: 0.4751 - val_loss: 1.6518 - val_accuracy: 0.4385
# NOTE(review): start_time/end_time bracket the model.fit() call above, so this
# interval is the TRAINING time, not prediction time — message corrected.
time_to_predict6 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict6))
total elapsed time to predict: 0:05:23.235625
# Evaluate model6 on the held-out test set and report accuracy as a percentage.
results6 = model6.evaluate(x_test, y_test_cat)
loss, accuracy = results6
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 2s 6ms/step - loss: 1.6518 - accuracy: 0.4385 test set accuracy: 43.84999871253967
# model7: two conv/pool/dropout stages feeding an L2-regularized dense head
# with batch-norm, ending in a 10-way softmax.
model7 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu',
           input_shape=(32, 32, 3)),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Flatten(),
    Dense(384, activation='relu',
          kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])
model7.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 30, 30, 128) 3584
max_pooling2d (MaxPooling2D (None, 15, 15, 128) 0
)
dropout (Dropout) (None, 15, 15, 128) 0
conv2d_1 (Conv2D) (None, 13, 13, 256) 295168
max_pooling2d_1 (MaxPooling (None, 6, 6, 256) 0
2D)
dropout_1 (Dropout) (None, 6, 6, 256) 0
flatten (Flatten) (None, 9216) 0
dense (Dense) (None, 384) 3539328
batch_normalization (BatchN (None, 384) 1536
ormalization)
dropout_2 (Dropout) (None, 384) 0
dense_1 (Dense) (None, 10) 3850
=================================================================
Total params: 3,843,466
Trainable params: 3,842,698
Non-trainable params: 768
_________________________________________________________________
# Render the architecture diagram, compile, and time the 50-epoch training run.
keras.utils.plot_model(model7, "CIFAR10.png", show_shapes=True)
model7.compile(loss='categorical_crossentropy', optimizer='adam',
               metrics=['accuracy'])
start_time = datetime.datetime.now()
history7 = model7.fit(x_train, y_train_cat,
                      validation_data=(x_test, y_test_cat), epochs=50)
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 22s 8ms/step - loss: 1.8821 - accuracy: 0.4800 - val_loss: 1.4453 - val_accuracy: 0.6186 Epoch 2/50 1563/1563 [==============================] - 13s 9ms/step - loss: 1.5091 - accuracy: 0.5901 - val_loss: 1.4074 - val_accuracy: 0.6277 Epoch 3/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.4406 - accuracy: 0.6227 - val_loss: 1.3230 - val_accuracy: 0.6678 Epoch 4/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.3999 - accuracy: 0.6425 - val_loss: 1.3683 - val_accuracy: 0.6540 Epoch 5/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.3658 - accuracy: 0.6589 - val_loss: 1.3441 - val_accuracy: 0.6624 Epoch 6/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.3491 - accuracy: 0.6658 - val_loss: 1.2042 - val_accuracy: 0.7085 Epoch 7/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.3233 - accuracy: 0.6749 - val_loss: 1.2390 - val_accuracy: 0.7043 Epoch 8/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.3072 - accuracy: 0.6815 - val_loss: 1.1833 - val_accuracy: 0.7210 Epoch 9/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2912 - accuracy: 0.6863 - val_loss: 1.2493 - val_accuracy: 0.7021 Epoch 10/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2774 - accuracy: 0.6953 - val_loss: 1.1940 - val_accuracy: 0.7221 Epoch 11/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2682 - accuracy: 0.6993 - val_loss: 1.2756 - val_accuracy: 0.6859 Epoch 12/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2633 - accuracy: 0.7011 - val_loss: 1.2289 - val_accuracy: 0.7125 Epoch 13/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2435 - accuracy: 0.7078 - val_loss: 1.2490 - val_accuracy: 0.7032 Epoch 14/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2431 - accuracy: 0.7080 - 
val_loss: 1.1690 - val_accuracy: 0.7237 Epoch 15/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2284 - accuracy: 0.7121 - val_loss: 1.2036 - val_accuracy: 0.7182 Epoch 16/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2220 - accuracy: 0.7148 - val_loss: 1.2880 - val_accuracy: 0.6909 Epoch 17/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2085 - accuracy: 0.7196 - val_loss: 1.1288 - val_accuracy: 0.7408 Epoch 18/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.2019 - accuracy: 0.7222 - val_loss: 1.2154 - val_accuracy: 0.7126 Epoch 19/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1953 - accuracy: 0.7216 - val_loss: 1.1464 - val_accuracy: 0.7422 Epoch 20/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1885 - accuracy: 0.7246 - val_loss: 1.1935 - val_accuracy: 0.7200 Epoch 21/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1803 - accuracy: 0.7285 - val_loss: 1.1964 - val_accuracy: 0.7219 Epoch 22/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1733 - accuracy: 0.7312 - val_loss: 1.1621 - val_accuracy: 0.7310 Epoch 23/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1689 - accuracy: 0.7279 - val_loss: 1.1897 - val_accuracy: 0.7221 Epoch 24/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1592 - accuracy: 0.7329 - val_loss: 1.1172 - val_accuracy: 0.7474 Epoch 25/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1530 - accuracy: 0.7339 - val_loss: 1.1950 - val_accuracy: 0.7174 Epoch 26/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1470 - accuracy: 0.7387 - val_loss: 1.0816 - val_accuracy: 0.7610 Epoch 27/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1446 - accuracy: 0.7366 - val_loss: 1.1349 - val_accuracy: 0.7455 Epoch 28/50 1563/1563 [==============================] - 13s 
8ms/step - loss: 1.1407 - accuracy: 0.7418 - val_loss: 1.1627 - val_accuracy: 0.7376 Epoch 29/50 1563/1563 [==============================] - 12s 8ms/step - loss: 1.1330 - accuracy: 0.7449 - val_loss: 1.1106 - val_accuracy: 0.7512 Epoch 30/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1295 - accuracy: 0.7428 - val_loss: 1.1596 - val_accuracy: 0.7345 Epoch 31/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1260 - accuracy: 0.7448 - val_loss: 1.1449 - val_accuracy: 0.7323 Epoch 32/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1249 - accuracy: 0.7467 - val_loss: 1.1105 - val_accuracy: 0.7499 Epoch 33/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1186 - accuracy: 0.7455 - val_loss: 1.1667 - val_accuracy: 0.7325 Epoch 34/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1255 - accuracy: 0.7473 - val_loss: 1.0826 - val_accuracy: 0.7593 Epoch 35/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1127 - accuracy: 0.7491 - val_loss: 1.0889 - val_accuracy: 0.7624 Epoch 36/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1094 - accuracy: 0.7492 - val_loss: 1.0817 - val_accuracy: 0.7669 Epoch 37/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1080 - accuracy: 0.7522 - val_loss: 1.1251 - val_accuracy: 0.7455 Epoch 38/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.1056 - accuracy: 0.7527 - val_loss: 1.0701 - val_accuracy: 0.7663 Epoch 39/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0959 - accuracy: 0.7523 - val_loss: 1.0624 - val_accuracy: 0.7655 Epoch 40/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0965 - accuracy: 0.7532 - val_loss: 1.1137 - val_accuracy: 0.7479 Epoch 41/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0965 - accuracy: 0.7539 - val_loss: 1.0770 - val_accuracy: 0.7578 Epoch 42/50 
1563/1563 [==============================] - 13s 8ms/step - loss: 1.0900 - accuracy: 0.7563 - val_loss: 1.0764 - val_accuracy: 0.7598 Epoch 43/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0946 - accuracy: 0.7567 - val_loss: 1.0942 - val_accuracy: 0.7596 Epoch 44/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0827 - accuracy: 0.7590 - val_loss: 1.1380 - val_accuracy: 0.7369 Epoch 45/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0831 - accuracy: 0.7567 - val_loss: 1.2926 - val_accuracy: 0.6878 Epoch 46/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0799 - accuracy: 0.7588 - val_loss: 1.1541 - val_accuracy: 0.7322 Epoch 47/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0709 - accuracy: 0.7644 - val_loss: 1.0244 - val_accuracy: 0.7815 Epoch 48/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0704 - accuracy: 0.7636 - val_loss: 1.0528 - val_accuracy: 0.7682 Epoch 49/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0718 - accuracy: 0.7622 - val_loss: 1.0607 - val_accuracy: 0.7655 Epoch 50/50 1563/1563 [==============================] - 13s 8ms/step - loss: 1.0679 - accuracy: 0.7636 - val_loss: 1.0850 - val_accuracy: 0.7601
# NOTE(review): start_time/end_time bracket the model.fit() call above, so this
# interval is the TRAINING time, not prediction time — message corrected.
time_to_predict7 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict7))
total elapsed time to predict: 0:11:24.022771
# Evaluate model7 on the held-out test set and report accuracy as a percentage.
results7 = model7.evaluate(x_test, y_test_cat)
loss, accuracy = results7
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 4ms/step - loss: 1.0850 - accuracy: 0.7601 test set accuracy: 76.010000705719
# Class-probability predictions for every test image: (10000, 10).
preds7 = model7.predict(x_test)
pred_shape7 = preds7.shape
print('shape of preds: ', pred_shape7)
313/313 [==============================] - 1s 3ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# history.history maps each recorded metric name to its per-epoch values
# (loss/accuracy plus their val_ counterparts, per the keys printed below).
history7_dict = history7.history
history7_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Tabulate per-epoch metrics and show the last five epochs, rounded.
history7_df = pd.DataFrame.from_dict(history7_dict)
history7_df.tail(5).round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 32 | 0.257 | 0.909 | 0.757 | 0.785 |
| 33 | 0.252 | 0.910 | 0.789 | 0.777 |
| 34 | 0.251 | 0.911 | 0.723 | 0.786 |
| 35 | 0.246 | 0.912 | 0.724 | 0.785 |
| 36 | 0.238 | 0.915 | 0.767 | 0.780 |
# Plot all four metric curves on a single axes.
history_frame7 = pd.DataFrame(history7.history)
history_frame7.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f8716569e90>
# Pull the per-epoch metric curves out of the training history.
hist7 = history7.history
losses7 = hist7['loss']
accs7 = hist7['accuracy']
val_losses7 = hist7['val_loss']
val_accs7 = hist7['val_accuracy']
epochs7 = len(losses7)
# Confusion Matrix
# Collapse the probability rows to hard class labels.
pred7 = np.argmax(model7.predict(x_test), axis=1)
313/313 [==============================] - 1s 3ms/step
# Prints the classification report, accuracy score, and RMSE for model7
# (helper defined earlier in the notebook; its output follows below).
print_validation_report(y_test, pred7)
Classification Report
precision recall f1-score support
0 0.81 0.79 0.80 1000
1 0.91 0.84 0.88 1000
2 0.78 0.54 0.64 1000
3 0.64 0.54 0.58 1000
4 0.62 0.84 0.71 1000
5 0.71 0.66 0.68 1000
6 0.65 0.92 0.76 1000
7 0.88 0.74 0.81 1000
8 0.90 0.83 0.86 1000
9 0.81 0.90 0.85 1000
accuracy 0.76 10000
macro avg 0.77 0.76 0.76 10000
weighted avg 0.77 0.76 0.76 10000
Accuracy Score: 0.7601
Root Mean Square Error: 1.9854470529329158
# Heat-map of model7's confusion matrix (helper defined earlier in the notebook).
plot_confusion_matrix(y_test,pred7)
# NOTE(review): preds7 was already computed above — this recomputes the same array.
preds7 = model7.predict(x_test)
313/313 [==============================] - 1s 3ms/step
# Sanity check: (n_test_samples, n_classes) — 10 class probabilities per image.
preds7.shape
(10000, 10)
# Style the first 20 prediction rows as a probability heat-map.
cm = sns.light_palette((260, 75, 60), input="husl", as_cmap=True)
cifar_columns = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                 'dog', 'frog', 'horse', 'ship', 'truck']
df7 = pd.DataFrame(preds7[:20], columns=cifar_columns)
df7.style.format("{:.2%}").background_gradient(cmap=cm)
| airplane | automobile | bird | cat | deer | dog | frog | horse | ship | truck | |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 0.44% | 0.05% | 0.51% | 82.89% | 1.71% | 9.93% | 3.24% | 0.32% | 0.80% | 0.12% |
| 1 | 0.42% | 17.59% | 0.00% | 0.01% | 0.00% | 0.00% | 0.00% | 0.00% | 81.97% | 0.01% |
| 2 | 7.87% | 10.04% | 0.21% | 0.38% | 0.11% | 0.04% | 0.38% | 0.09% | 79.12% | 1.77% |
| 3 | 54.26% | 3.23% | 0.73% | 0.14% | 0.16% | 0.01% | 1.28% | 0.02% | 40.01% | 0.17% |
| 4 | 0.00% | 0.00% | 2.20% | 1.33% | 1.94% | 0.13% | 94.37% | 0.00% | 0.01% | 0.00% |
| 5 | 0.01% | 0.12% | 0.30% | 1.29% | 1.08% | 0.92% | 95.58% | 0.51% | 0.05% | 0.14% |
| 6 | 0.01% | 49.15% | 0.02% | 0.18% | 0.01% | 0.12% | 0.25% | 0.02% | 0.02% | 50.23% |
| 7 | 1.27% | 0.02% | 9.66% | 8.82% | 25.53% | 4.39% | 48.74% | 0.80% | 0.58% | 0.20% |
| 8 | 0.11% | 0.01% | 1.50% | 80.58% | 6.32% | 5.42% | 4.36% | 1.52% | 0.17% | 0.02% |
| 9 | 5.77% | 54.82% | 0.88% | 0.93% | 3.95% | 0.35% | 3.63% | 0.63% | 3.61% | 25.43% |
| 10 | 67.01% | 0.04% | 2.96% | 8.34% | 13.82% | 1.98% | 1.56% | 0.58% | 3.26% | 0.44% |
| 11 | 0.00% | 0.15% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.85% |
| 12 | 0.13% | 0.18% | 7.40% | 9.05% | 21.47% | 41.08% | 5.13% | 15.16% | 0.25% | 0.15% |
| 13 | 0.01% | 0.01% | 0.06% | 0.25% | 2.31% | 1.52% | 0.06% | 95.65% | 0.01% | 0.10% |
| 14 | 0.00% | 0.08% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.01% | 99.90% |
| 15 | 0.23% | 0.16% | 0.44% | 0.79% | 0.53% | 0.12% | 95.94% | 0.00% | 1.69% | 0.09% |
| 16 | 0.03% | 0.05% | 0.26% | 11.72% | 0.13% | 83.35% | 2.38% | 0.54% | 0.70% | 0.87% |
| 17 | 0.22% | 0.95% | 3.44% | 28.23% | 12.30% | 21.15% | 10.18% | 18.03% | 0.64% | 4.85% |
| 18 | 0.51% | 9.20% | 0.01% | 0.03% | 0.01% | 0.00% | 0.05% | 0.00% | 83.44% | 6.74% |
| 19 | 0.00% | 0.00% | 0.05% | 0.04% | 0.15% | 0.01% | 99.76% | 0.00% | 0.00% | 0.00% |
# Fetch a CIFAR-10 test image to probe the network's activations with.
(_, _), (test_images, test_labels) = tf.keras.datasets.cifar10.load_data()
img = test_images[2004]
# Add a leading batch axis: (32, 32, 3) -> (1, 32, 32, 3).
img_tensor = image.img_to_array(img)[np.newaxis, ...]
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
plt.imshow(img, cmap='viridis')
plt.axis('off')
plt.show()
# Build a multi-output model exposing the activations of model7's first 8 layers.
layer_outputs7 = []
for layer in model7.layers[:8]:
    layer_outputs7.append(layer.output)
activation_model7 = models.Model(inputs=model7.input, outputs=layer_outputs7)
# One activation array per exposed layer, for the single probe image.
activations7 = activation_model7.predict(img_tensor)
len(activations7)
1/1 [==============================] - 0s 109ms/step
8
# Collect every layer name in model7.
layer_names7 = [layer.name for layer in model7.layers]
layer_names7
['conv2d', 'max_pooling2d', 'dropout', 'conv2d_1', 'max_pooling2d_1', 'dropout_1', 'flatten', 'dense', 'batch_normalization', 'dropout_2', 'dense_1']
# Only the first three layers (conv / pool / dropout) are visualized below.
layer_names7 = [layer.name for layer in model7.layers[:3]]
images_per_row = 16
# Display the feature maps of the selected layers for the probe image.
# Indentation reconstructed from the flattened notebook paste.
for layer_name, layer_activation7 in zip(layer_names7, activations7):
    # The feature map has shape (1, size, size, n_features).
    n_features = layer_activation7.shape[-1]
    size = layer_activation7.shape[1]
    # Tile the channels into a grid of `images_per_row` columns.
    n_cols = n_features // images_per_row
    display_grid = np.zeros((size * n_cols, images_per_row * size))
    for col in range(n_cols):
        for row in range(images_per_row):
            channel_image = layer_activation7[0, :, :,
                                              col * images_per_row + row]
            # Normalize to mean 128 / std 64 for display. Use out-of-place ops
            # (the slice above is a view into activations7) and guard against
            # zero-variance channels, which previously produced the
            # "invalid value encountered in true_divide" RuntimeWarning.
            std = channel_image.std()
            channel_image = channel_image - channel_image.mean()
            if std > 0:
                channel_image = channel_image / std
            channel_image = channel_image * 64 + 128
            channel_image = np.clip(channel_image, 0, 255).astype('uint8')
            display_grid[col * size: (col + 1) * size,
                         row * size: (row + 1) * size] = channel_image
    # Display the grid, one figure per layer.
    scale = 1. / size
    plt.figure(figsize=(scale * display_grid.shape[1],
                        scale * display_grid.shape[0]))
    plt.title(layer_name)
    plt.grid(False)
    plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show()
/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:28: RuntimeWarning: invalid value encountered in true_divide
# model8: model7's design plus a third conv/pool/dropout stage (512 filters)
# before the regularized dense head and 10-way softmax.
model8 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu',
           input_shape=(32, 32, 3)),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Conv2D(filters=512, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Flatten(),
    Dense(384, activation='relu',
          kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])
model8.summary()
Model: "sequential_2"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_5 (Conv2D) (None, 30, 30, 128) 3584
max_pooling2d_5 (MaxPooling (None, 15, 15, 128) 0
2D)
dropout_7 (Dropout) (None, 15, 15, 128) 0
conv2d_6 (Conv2D) (None, 13, 13, 256) 295168
max_pooling2d_6 (MaxPooling (None, 6, 6, 256) 0
2D)
dropout_8 (Dropout) (None, 6, 6, 256) 0
conv2d_7 (Conv2D) (None, 4, 4, 512) 1180160
max_pooling2d_7 (MaxPooling (None, 2, 2, 512) 0
2D)
dropout_9 (Dropout) (None, 2, 2, 512) 0
flatten_2 (Flatten) (None, 2048) 0
dense_4 (Dense) (None, 384) 786816
batch_normalization_2 (Batc (None, 384) 1536
hNormalization)
dropout_10 (Dropout) (None, 384) 0
dense_5 (Dense) (None, 10) 3850
=================================================================
Total params: 2,271,114
Trainable params: 2,270,346
Non-trainable params: 768
_________________________________________________________________
# Render the architecture diagram, compile, and time the 50-epoch training run.
keras.utils.plot_model(model8, "CIFAR10.png", show_shapes=True)
model8.compile(optimizer='adam', loss='categorical_crossentropy',
               metrics=['accuracy'])
start_time = datetime.datetime.now()
history8 = model8.fit(x_train, y_train_cat,
                      validation_data=(x_test, y_test_cat), epochs=50)
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 16s 9ms/step - loss: 1.8823 - accuracy: 0.4245 - val_loss: 1.5053 - val_accuracy: 0.5328 Epoch 2/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.3695 - accuracy: 0.5725 - val_loss: 1.2301 - val_accuracy: 0.6186 Epoch 3/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.2293 - accuracy: 0.6225 - val_loss: 1.1258 - val_accuracy: 0.6604 Epoch 4/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.1540 - accuracy: 0.6567 - val_loss: 1.1332 - val_accuracy: 0.6680 Epoch 5/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.1009 - accuracy: 0.6758 - val_loss: 1.0253 - val_accuracy: 0.7035 Epoch 6/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.0606 - accuracy: 0.6923 - val_loss: 1.0585 - val_accuracy: 0.6943 Epoch 7/50 1563/1563 [==============================] - 14s 9ms/step - loss: 1.0247 - accuracy: 0.7070 - val_loss: 0.9485 - val_accuracy: 0.7346 Epoch 8/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.9887 - accuracy: 0.7166 - val_loss: 1.0943 - val_accuracy: 0.6869 Epoch 9/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.9655 - accuracy: 0.7259 - val_loss: 0.8764 - val_accuracy: 0.7596 Epoch 10/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.9429 - accuracy: 0.7380 - val_loss: 0.9025 - val_accuracy: 0.7507 Epoch 11/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.9255 - accuracy: 0.7418 - val_loss: 0.8808 - val_accuracy: 0.7561 Epoch 12/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.9062 - accuracy: 0.7481 - val_loss: 0.8520 - val_accuracy: 0.7666 Epoch 13/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8802 - accuracy: 0.7584 - val_loss: 0.9107 - val_accuracy: 0.7514 Epoch 14/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8681 - accuracy: 0.7629 - 
val_loss: 0.8292 - val_accuracy: 0.7799 Epoch 15/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8620 - accuracy: 0.7666 - val_loss: 0.8201 - val_accuracy: 0.7831 Epoch 16/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8462 - accuracy: 0.7699 - val_loss: 0.8740 - val_accuracy: 0.7637 Epoch 17/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8344 - accuracy: 0.7778 - val_loss: 0.7987 - val_accuracy: 0.7879 Epoch 18/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8148 - accuracy: 0.7811 - val_loss: 0.8190 - val_accuracy: 0.7814 Epoch 19/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.8107 - accuracy: 0.7822 - val_loss: 0.7960 - val_accuracy: 0.7865 Epoch 20/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7953 - accuracy: 0.7888 - val_loss: 0.8109 - val_accuracy: 0.7868 Epoch 21/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7899 - accuracy: 0.7915 - val_loss: 0.8150 - val_accuracy: 0.7846 Epoch 22/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7723 - accuracy: 0.7957 - val_loss: 0.7823 - val_accuracy: 0.7894 Epoch 23/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7629 - accuracy: 0.7996 - val_loss: 0.8252 - val_accuracy: 0.7813 Epoch 24/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7612 - accuracy: 0.8011 - val_loss: 0.7847 - val_accuracy: 0.7928 Epoch 25/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7517 - accuracy: 0.8044 - val_loss: 0.7867 - val_accuracy: 0.7917 Epoch 26/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7366 - accuracy: 0.8103 - val_loss: 0.8043 - val_accuracy: 0.7848 Epoch 27/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7399 - accuracy: 0.8081 - val_loss: 0.7686 - val_accuracy: 0.7985 Epoch 28/50 1563/1563 [==============================] - 14s 
9ms/step - loss: 0.7275 - accuracy: 0.8108 - val_loss: 0.7762 - val_accuracy: 0.7984 Epoch 29/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7171 - accuracy: 0.8142 - val_loss: 0.7640 - val_accuracy: 0.8001 Epoch 30/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7066 - accuracy: 0.8172 - val_loss: 0.7308 - val_accuracy: 0.8107 Epoch 31/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.7106 - accuracy: 0.8171 - val_loss: 0.7596 - val_accuracy: 0.8042 Epoch 32/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6964 - accuracy: 0.8215 - val_loss: 0.8013 - val_accuracy: 0.7883 Epoch 33/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6953 - accuracy: 0.8234 - val_loss: 0.7487 - val_accuracy: 0.8049 Epoch 34/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6891 - accuracy: 0.8253 - val_loss: 0.7460 - val_accuracy: 0.8085 Epoch 35/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6801 - accuracy: 0.8260 - val_loss: 0.7713 - val_accuracy: 0.7971 Epoch 36/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6766 - accuracy: 0.8288 - val_loss: 0.7434 - val_accuracy: 0.8089 Epoch 37/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6735 - accuracy: 0.8266 - val_loss: 0.7541 - val_accuracy: 0.8058 Epoch 38/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6645 - accuracy: 0.8305 - val_loss: 0.7338 - val_accuracy: 0.8094 Epoch 39/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6540 - accuracy: 0.8362 - val_loss: 0.7420 - val_accuracy: 0.8071 Epoch 40/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6503 - accuracy: 0.8358 - val_loss: 0.7442 - val_accuracy: 0.8066 Epoch 41/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6495 - accuracy: 0.8363 - val_loss: 0.7611 - val_accuracy: 0.8057 Epoch 42/50 
1563/1563 [==============================] - 14s 9ms/step - loss: 0.6483 - accuracy: 0.8383 - val_loss: 0.7472 - val_accuracy: 0.8067 Epoch 43/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6390 - accuracy: 0.8407 - val_loss: 0.7150 - val_accuracy: 0.8175 Epoch 44/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6327 - accuracy: 0.8426 - val_loss: 0.7276 - val_accuracy: 0.8130 Epoch 45/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6318 - accuracy: 0.8429 - val_loss: 0.7242 - val_accuracy: 0.8170 Epoch 46/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6291 - accuracy: 0.8438 - val_loss: 0.7319 - val_accuracy: 0.8124 Epoch 47/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6205 - accuracy: 0.8462 - val_loss: 0.7513 - val_accuracy: 0.8079 Epoch 48/50 1563/1563 [==============================] - 15s 9ms/step - loss: 0.6189 - accuracy: 0.8461 - val_loss: 0.7680 - val_accuracy: 0.8020 Epoch 49/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6140 - accuracy: 0.8481 - val_loss: 0.7248 - val_accuracy: 0.8150 Epoch 50/50 1563/1563 [==============================] - 14s 9ms/step - loss: 0.6132 - accuracy: 0.8494 - val_loss: 0.7154 - val_accuracy: 0.8168
# NOTE(review): start_time/end_time bracket the model.fit() call above, so this
# interval is the TRAINING time, not prediction time — message corrected.
time_to_predict8 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict8))
total elapsed time to predict: 0:11:55.937846
# Evaluate model8 on the held-out test set and report accuracy as a percentage.
results8 = model8.evaluate(x_test, y_test_cat)
loss, accuracy = results8
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 1s 5ms/step - loss: 0.7154 - accuracy: 0.8168 test set accuracy: 81.67999982833862
# Class-probability predictions for every test image: (10000, 10).
preds8 = model8.predict(x_test)
pred_shape8 = preds8.shape
print('shape of preds: ', pred_shape8)
313/313 [==============================] - 1s 3ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# history.history maps each recorded metric name to its per-epoch values
# (loss/accuracy plus their val_ counterparts, per the keys printed below).
history8_dict = history8.history
history8_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Tabulate per-epoch metrics and show the last five epochs, rounded.
history8_df = pd.DataFrame.from_dict(history8_dict)
history8_df.tail(5).round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 0.629 | 0.844 | 0.732 | 0.812 |
| 46 | 0.621 | 0.846 | 0.751 | 0.808 |
| 47 | 0.619 | 0.846 | 0.768 | 0.802 |
| 48 | 0.614 | 0.848 | 0.725 | 0.815 |
| 49 | 0.613 | 0.849 | 0.715 | 0.817 |
# Plot all four metric curves on a single axes.
history_frame8 = pd.DataFrame(history8.history)
history_frame8.plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f86fb7cb510>
# Pull the per-epoch metric curves out of the training history.
hist8 = history8.history
losses8 = hist8['loss']
accs8 = hist8['accuracy']
val_losses8 = hist8['val_loss']
val_accs8 = hist8['val_accuracy']
epochs8 = len(losses8)
# Confusion Matrix
# Collapse the probability rows to hard class labels.
pred8 = np.argmax(model8.predict(x_test), axis=1)
313/313 [==============================] - 1s 3ms/step
# Prints the classification report, accuracy score, and RMSE for model8
# (helper defined earlier in the notebook; its output follows below).
print_validation_report(y_test, pred8)
Classification Report
precision recall f1-score support
0 0.85 0.82 0.84 1000
1 0.95 0.88 0.91 1000
2 0.77 0.73 0.75 1000
3 0.68 0.65 0.66 1000
4 0.78 0.82 0.80 1000
5 0.69 0.79 0.74 1000
6 0.91 0.83 0.87 1000
7 0.87 0.83 0.85 1000
8 0.84 0.92 0.88 1000
9 0.87 0.90 0.88 1000
accuracy 0.82 10000
macro avg 0.82 0.82 0.82 10000
weighted avg 0.82 0.82 0.82 10000
Accuracy Score: 0.8168
Root Mean Square Error: 1.7426703646989583
# Heat-map of model8's confusion matrix (helper defined earlier in the notebook).
plot_confusion_matrix(y_test,pred8)
# NOTE(review): preds8 was already computed above — this recomputes the same array.
preds8 = model8.predict(x_test)
313/313 [==============================] - 1s 3ms/step
# Style the first 20 prediction rows as a probability heat-map
# (reuses the `cm` palette created for model7's table).
cifar_columns8 = ['airplane', 'automobile', 'bird', 'cat', 'deer',
                  'dog', 'frog', 'horse', 'ship', 'truck']
df8 = pd.DataFrame(preds8[:20], columns=cifar_columns8)
df8.style.format("{:.2%}").background_gradient(cmap=cm)
| airplane | automobile | bird | cat | deer | dog | frog | horse | ship | truck | |
|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.86% | 0.03% | 0.94% | 93.15% | 0.13% | 2.72% | 0.21% | 0.02% | 0.92% | 0.03% |
| 1 | 1.56% | 1.58% | 0.01% | 0.05% | 0.00% | 0.01% | 0.01% | 0.01% | 96.73% | 0.04% |
| 2 | 0.56% | 0.29% | 0.02% | 0.14% | 0.08% | 0.05% | 0.06% | 0.12% | 98.33% | 0.35% |
| 3 | 93.85% | 0.10% | 0.85% | 0.62% | 0.61% | 0.11% | 0.10% | 0.12% | 3.51% | 0.13% |
| 4 | 0.01% | 0.00% | 2.80% | 0.64% | 21.57% | 0.13% | 74.82% | 0.00% | 0.01% | 0.00% |
| 5 | 0.02% | 0.01% | 0.35% | 3.16% | 0.78% | 2.33% | 93.22% | 0.05% | 0.06% | 0.01% |
| 6 | 0.02% | 60.94% | 0.06% | 0.61% | 0.03% | 0.71% | 0.29% | 0.03% | 0.01% | 37.29% |
| 7 | 0.24% | 0.03% | 21.17% | 4.48% | 5.42% | 1.28% | 67.08% | 0.16% | 0.07% | 0.08% |
| 8 | 0.02% | 0.00% | 0.29% | 93.94% | 1.43% | 3.56% | 0.66% | 0.08% | 0.01% | 0.01% |
| 9 | 0.24% | 43.06% | 0.08% | 0.28% | 0.09% | 0.13% | 0.84% | 0.03% | 1.47% | 53.76% |
| 10 | 72.41% | 0.42% | 4.16% | 10.19% | 0.61% | 2.64% | 0.51% | 1.46% | 7.39% | 0.20% |
| 11 | 0.00% | 0.02% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.98% |
| 12 | 0.56% | 0.04% | 4.18% | 25.54% | 5.47% | 61.37% | 1.86% | 0.89% | 0.03% | 0.04% |
| 13 | 0.00% | 0.00% | 0.01% | 0.26% | 0.37% | 1.80% | 0.01% | 97.56% | 0.00% | 0.00% |
| 14 | 0.00% | 0.05% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.95% |
| 15 | 2.62% | 0.35% | 0.02% | 0.12% | 0.08% | 0.01% | 0.39% | 0.01% | 96.32% | 0.07% |
| 16 | 0.05% | 0.01% | 0.15% | 9.35% | 0.12% | 89.41% | 0.08% | 0.77% | 0.02% | 0.04% |
| 17 | 0.20% | 0.56% | 0.51% | 24.33% | 9.97% | 27.40% | 1.77% | 9.16% | 0.20% | 25.90% |
| 18 | 0.24% | 0.04% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 0.00% | 99.70% | 0.02% |
| 19 | 0.00% | 0.01% | 0.08% | 0.05% | 0.19% | 0.02% | 99.62% | 0.00% | 0.02% | 0.00% |
# Extracts the outputs of the top 8 layers:
layer_outputs8 = [layer.output for layer in model8.layers[:8]]
# Creates a model that will return these outputs, given the model input:
activation_model8 = models.Model(inputs=model8.input, outputs=layer_outputs8)
# Run one image through the multi-output model; `img_tensor` is presumably a
# single preprocessed image batch from an earlier cell — TODO confirm.
activations8 = activation_model8.predict(img_tensor)
# One activation array per captured layer (8 total).
len(activations8)
1/1 [==============================] - 0s 103ms/step
8
# Collect the name of every layer in model8 (used when labelling plots).
layer_names8 = [layer.name for layer in model8.layers]
layer_names8
['conv2d_5', 'max_pooling2d_5', 'dropout_7', 'conv2d_6', 'max_pooling2d_6', 'dropout_8', 'conv2d_7', 'max_pooling2d_7', 'dropout_9', 'flatten_2', 'dense_4', 'batch_normalization_2', 'dropout_10', 'dense_5']
# Names of the first three layers, used to title the feature-map plots.
# NOTE(review): only 3 names are collected, so zip() below silently limits
# the display to the first 3 of the 8 captured activations — confirm intent.
layer_names8 = [layer.name for layer in model8.layers[:3]]
images_per_row = 16
# Now let's display our feature maps
for layer_name, layer_activation8 in zip(layer_names8, activations8):
    # Number of channels in this activation map.
    # The feature map has shape (1, size, size, n_features).
    n_features = layer_activation8.shape[-1]
    size = layer_activation8.shape[1]
    # Tile the channels into a grid of n_cols rows x images_per_row columns.
    n_cols = n_features // images_per_row
    display_grid = np.zeros((size * n_cols, images_per_row * size))
    # Tile each filter into this big horizontal grid.
    for col in range(n_cols):
        for row in range(images_per_row):
            # Take a float COPY of the channel: the original code sliced a
            # view and the in-place -=, /=, *= below mutated `activations8`.
            channel_image = layer_activation8[0, :, :,
                                              col * images_per_row + row].astype('float64')
            # Post-process the feature to a displayable 0-255 range.
            channel_image -= channel_image.mean()
            # Guard against a zero standard deviation (constant channel),
            # which previously raised "invalid value encountered in
            # true_divide" and put NaNs in the grid.
            std = channel_image.std()
            if std > 0:
                channel_image /= std
            channel_image *= 64
            channel_image += 128
            channel_image = np.clip(channel_image, 0, 255).astype('uint8')
            display_grid[col * size : (col + 1) * size,
                         row * size : (row + 1) * size] = channel_image
    # Display the grid, scaled so each tile occupies roughly equal area.
    scale = 1. / size
    plt.figure(figsize=(scale * display_grid.shape[1],
                        scale * display_grid.shape[0]))
    plt.title(layer_name)
    plt.grid(False)
    plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show();
/usr/local/lib/python3.7/dist-packages/ipykernel_launcher.py:28: RuntimeWarning: invalid value encountered in true_divide
# model9: three-stage CNN (128 -> 256 -> 512 filters) with batch norm,
# progressively larger dropout, and an L2-regularized dense head producing
# a softmax over the 10 CIFAR-10 classes.
model9 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Conv2D(filters=512, kernel_size=(3, 3), activation='relu'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Flatten(),
    Dense(384, activation='relu', kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])
model9.summary()
Model: "sequential_4"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_13 (Conv2D) (None, 30, 30, 128) 3584
batch_normalization_6 (Batc (None, 30, 30, 128) 512
hNormalization)
conv2d_14 (Conv2D) (None, 28, 28, 128) 147584
max_pooling2d_11 (MaxPoolin (None, 14, 14, 128) 0
g2D)
dropout_15 (Dropout) (None, 14, 14, 128) 0
conv2d_15 (Conv2D) (None, 12, 12, 256) 295168
batch_normalization_7 (Batc (None, 12, 12, 256) 1024
hNormalization)
conv2d_16 (Conv2D) (None, 10, 10, 256) 590080
max_pooling2d_12 (MaxPoolin (None, 5, 5, 256) 0
g2D)
dropout_16 (Dropout) (None, 5, 5, 256) 0
conv2d_17 (Conv2D) (None, 3, 3, 512) 1180160
max_pooling2d_13 (MaxPoolin (None, 1, 1, 512) 0
g2D)
dropout_17 (Dropout) (None, 1, 1, 512) 0
flatten_4 (Flatten) (None, 512) 0
dense_8 (Dense) (None, 384) 196992
batch_normalization_8 (Batc (None, 384) 1536
hNormalization)
dropout_18 (Dropout) (None, 384) 0
dense_9 (Dense) (None, 10) 3850
=================================================================
Total params: 2,420,490
Trainable params: 2,418,954
Non-trainable params: 1,536
_________________________________________________________________
# Render the model9 architecture diagram to CIFAR10.png.
keras.utils.plot_model(model9, "CIFAR10.png", show_shapes=True)
model9.compile(loss = 'categorical_crossentropy', optimizer = 'adam', metrics = ['accuracy'])
# Time the full 50-epoch training run (wall clock); validation runs on the
# test set each epoch.
start_time = datetime.datetime.now()
history9=model9.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 25s 15ms/step - loss: 1.9808 - accuracy: 0.3787 - val_loss: 1.6998 - val_accuracy: 0.4518 Epoch 2/50 1563/1563 [==============================] - 24s 15ms/step - loss: 1.2455 - accuracy: 0.6045 - val_loss: 1.2520 - val_accuracy: 0.5953 Epoch 3/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.9975 - accuracy: 0.6864 - val_loss: 1.0120 - val_accuracy: 0.6770 Epoch 4/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.8594 - accuracy: 0.7305 - val_loss: 1.3294 - val_accuracy: 0.5813 Epoch 5/50 1563/1563 [==============================] - 25s 16ms/step - loss: 0.7801 - accuracy: 0.7588 - val_loss: 0.7635 - val_accuracy: 0.7637 Epoch 6/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.7048 - accuracy: 0.7874 - val_loss: 0.7745 - val_accuracy: 0.7659 Epoch 7/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.6503 - accuracy: 0.8061 - val_loss: 0.8691 - val_accuracy: 0.7322 Epoch 8/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.5993 - accuracy: 0.8229 - val_loss: 0.7268 - val_accuracy: 0.7838 Epoch 9/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.5570 - accuracy: 0.8367 - val_loss: 0.6845 - val_accuracy: 0.7954 Epoch 10/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.5174 - accuracy: 0.8513 - val_loss: 0.6840 - val_accuracy: 0.8002 Epoch 11/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.4920 - accuracy: 0.8591 - val_loss: 0.6745 - val_accuracy: 0.7988 Epoch 12/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.4583 - accuracy: 0.8698 - val_loss: 0.6782 - val_accuracy: 0.8013 Epoch 13/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.4364 - accuracy: 0.8750 - val_loss: 0.7598 - val_accuracy: 0.7825 Epoch 14/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.4143 - accuracy: 
0.8854 - val_loss: 0.7721 - val_accuracy: 0.7845 Epoch 15/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.3947 - accuracy: 0.8921 - val_loss: 0.6749 - val_accuracy: 0.8091 Epoch 16/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.3731 - accuracy: 0.8988 - val_loss: 0.6590 - val_accuracy: 0.8156 Epoch 17/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.3544 - accuracy: 0.9051 - val_loss: 0.6655 - val_accuracy: 0.8162 Epoch 18/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.3446 - accuracy: 0.9085 - val_loss: 0.6738 - val_accuracy: 0.8106 Epoch 19/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.3235 - accuracy: 0.9148 - val_loss: 0.7617 - val_accuracy: 0.7937 Epoch 20/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.3120 - accuracy: 0.9182 - val_loss: 0.8080 - val_accuracy: 0.7835 Epoch 21/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.3060 - accuracy: 0.9209 - val_loss: 0.6688 - val_accuracy: 0.8158 Epoch 22/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.2932 - accuracy: 0.9248 - val_loss: 0.7228 - val_accuracy: 0.8097 Epoch 23/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2865 - accuracy: 0.9270 - val_loss: 0.6716 - val_accuracy: 0.8273 Epoch 24/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.2739 - accuracy: 0.9299 - val_loss: 0.7360 - val_accuracy: 0.8094 Epoch 25/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2694 - accuracy: 0.9318 - val_loss: 0.6657 - val_accuracy: 0.8263 Epoch 26/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2620 - accuracy: 0.9348 - val_loss: 0.7226 - val_accuracy: 0.8195 Epoch 27/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.2554 - accuracy: 0.9370 - val_loss: 0.7049 - val_accuracy: 0.8143 Epoch 28/50 1563/1563 
[==============================] - 23s 15ms/step - loss: 0.2429 - accuracy: 0.9392 - val_loss: 0.7232 - val_accuracy: 0.8180 Epoch 29/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.2404 - accuracy: 0.9409 - val_loss: 0.7086 - val_accuracy: 0.8152 Epoch 30/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2328 - accuracy: 0.9438 - val_loss: 0.7163 - val_accuracy: 0.8086 Epoch 31/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.2253 - accuracy: 0.9455 - val_loss: 0.7397 - val_accuracy: 0.8000 Epoch 32/50 1563/1563 [==============================] - 24s 15ms/step - loss: 0.2211 - accuracy: 0.9473 - val_loss: 0.7149 - val_accuracy: 0.8169 Epoch 33/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2170 - accuracy: 0.9480 - val_loss: 0.7802 - val_accuracy: 0.8080 Epoch 34/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2118 - accuracy: 0.9501 - val_loss: 0.7296 - val_accuracy: 0.8178 Epoch 35/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.2095 - accuracy: 0.9493 - val_loss: 0.7271 - val_accuracy: 0.8166 Epoch 36/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.2035 - accuracy: 0.9518 - val_loss: 0.7233 - val_accuracy: 0.8195 Epoch 37/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.2031 - accuracy: 0.9531 - val_loss: 0.6907 - val_accuracy: 0.8298 Epoch 38/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1964 - accuracy: 0.9546 - val_loss: 0.6983 - val_accuracy: 0.8302 Epoch 39/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1923 - accuracy: 0.9553 - val_loss: 0.6946 - val_accuracy: 0.8252 Epoch 40/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1891 - accuracy: 0.9557 - val_loss: 0.7511 - val_accuracy: 0.8259 Epoch 41/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1895 - accuracy: 0.9566 - 
val_loss: 0.7387 - val_accuracy: 0.8164 Epoch 42/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1845 - accuracy: 0.9578 - val_loss: 0.7032 - val_accuracy: 0.8305 Epoch 43/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1854 - accuracy: 0.9582 - val_loss: 0.7033 - val_accuracy: 0.8224 Epoch 44/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1721 - accuracy: 0.9603 - val_loss: 0.7204 - val_accuracy: 0.8281 Epoch 45/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1791 - accuracy: 0.9595 - val_loss: 0.6882 - val_accuracy: 0.8338 Epoch 46/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1748 - accuracy: 0.9607 - val_loss: 0.7193 - val_accuracy: 0.8244 Epoch 47/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1685 - accuracy: 0.9626 - val_loss: 0.7565 - val_accuracy: 0.8195 Epoch 48/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1668 - accuracy: 0.9623 - val_loss: 0.7163 - val_accuracy: 0.8214 Epoch 49/50 1563/1563 [==============================] - 23s 15ms/step - loss: 0.1647 - accuracy: 0.9628 - val_loss: 0.6869 - val_accuracy: 0.8264 Epoch 50/50 1563/1563 [==============================] - 24s 16ms/step - loss: 0.1668 - accuracy: 0.9629 - val_loss: 0.7078 - val_accuracy: 0.8185
# Elapsed wall-clock time for the model9.fit() call above. NOTE:
# start_time/end_time bracket TRAINING, not prediction — the original
# message said "predict", which was misleading. The variable name is kept
# unchanged for compatibility with any later cells.
time_to_predict9 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict9))
total elapsed time to predict: 0:20:24.852914
# Final test-set loss/accuracy for model9 (accuracy reported as a percentage).
loss, accuracy = model9.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 3s 8ms/step - loss: 0.7078 - accuracy: 0.8185 test set accuracy: 81.84999823570251
# Predictions
# Class-probability predictions for every test image: shape (10000, 10).
preds9 = model9.predict(x_test)
print('shape of preds: ', preds9.shape)
313/313 [==============================] - 1s 4ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# Per-epoch training history recorded by model9.fit().
history9_dict = history9.history
history9_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Tabulate the last five epochs, rounded to 3 decimals.
history9_df=pd.DataFrame(history9_dict)
history9_df.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 0.175 | 0.961 | 0.719 | 0.824 |
| 46 | 0.168 | 0.963 | 0.757 | 0.820 |
| 47 | 0.167 | 0.962 | 0.716 | 0.821 |
| 48 | 0.165 | 0.963 | 0.687 | 0.826 |
| 49 | 0.167 | 0.963 | 0.708 | 0.818 |
# Plot all four loss/accuracy curves across the 50 epochs.
pd.DataFrame(history9.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f86fb755450>
# Confusion Matrix
# Convert probabilities to hard class labels for the report below.
pred9 = model9.predict(x_test)
pred9 =np.argmax(pred9, axis=1)
313/313 [==============================] - 1s 4ms/step
print_validation_report(y_test, pred9)
Classification Report
precision recall f1-score support
0 0.80 0.85 0.82 1000
1 0.93 0.88 0.91 1000
2 0.80 0.71 0.75 1000
3 0.65 0.71 0.68 1000
4 0.85 0.78 0.81 1000
5 0.84 0.71 0.77 1000
6 0.73 0.95 0.83 1000
7 0.86 0.85 0.86 1000
8 0.91 0.85 0.88 1000
9 0.85 0.90 0.88 1000
accuracy 0.82 10000
macro avg 0.82 0.82 0.82 10000
weighted avg 0.82 0.82 0.82 10000
Accuracy Score: 0.8185
Root Mean Square Error: 1.762271261752855
# Confusion-matrix heatmap for model9's test-set predictions.
plot_confusion_matrix(y_test,pred9)
# model10: same three-stage layout as model9, but with 'same' padding,
# He-uniform kernel initialization, and batch norm after every conv in the
# first two stages.
model10 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3),
           activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.20),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(filters=512, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Flatten(),
    Dense(384, activation='relu', kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])
model10.summary()
Model: "sequential_5"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_18 (Conv2D) (None, 32, 32, 128) 3584
batch_normalization_9 (Batc (None, 32, 32, 128) 512
hNormalization)
conv2d_19 (Conv2D) (None, 32, 32, 128) 147584
batch_normalization_10 (Bat (None, 32, 32, 128) 512
chNormalization)
max_pooling2d_14 (MaxPoolin (None, 16, 16, 128) 0
g2D)
dropout_19 (Dropout) (None, 16, 16, 128) 0
conv2d_20 (Conv2D) (None, 16, 16, 256) 295168
batch_normalization_11 (Bat (None, 16, 16, 256) 1024
chNormalization)
conv2d_21 (Conv2D) (None, 16, 16, 256) 590080
batch_normalization_12 (Bat (None, 16, 16, 256) 1024
chNormalization)
max_pooling2d_15 (MaxPoolin (None, 8, 8, 256) 0
g2D)
dropout_20 (Dropout) (None, 8, 8, 256) 0
conv2d_22 (Conv2D) (None, 8, 8, 512) 1180160
max_pooling2d_16 (MaxPoolin (None, 4, 4, 512) 0
g2D)
dropout_21 (Dropout) (None, 4, 4, 512) 0
flatten_5 (Flatten) (None, 8192) 0
dense_10 (Dense) (None, 384) 3146112
batch_normalization_13 (Bat (None, 384) 1536
chNormalization)
dropout_22 (Dropout) (None, 384) 0
dense_11 (Dense) (None, 10) 3850
=================================================================
Total params: 5,371,146
Trainable params: 5,368,842
Non-trainable params: 2,304
_________________________________________________________________
# Render the model10 architecture diagram (overwrites the earlier
# CIFAR10.png written for model9).
keras.utils.plot_model(model10, "CIFAR10.png", show_shapes=True)
model10.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Time the full 50-epoch training run (wall clock).
start_time = datetime.datetime.now()
history10 = model10.fit(x_train,y_train_cat,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 36s 22ms/step - loss: 1.6472 - accuracy: 0.5733 - val_loss: 1.1608 - val_accuracy: 0.7030 Epoch 2/50 1563/1563 [==============================] - 33s 21ms/step - loss: 1.1985 - accuracy: 0.7212 - val_loss: 1.1961 - val_accuracy: 0.7355 Epoch 3/50 1563/1563 [==============================] - 33s 21ms/step - loss: 1.0742 - accuracy: 0.7727 - val_loss: 1.1852 - val_accuracy: 0.7194 Epoch 4/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.9256 - accuracy: 0.8073 - val_loss: 0.9484 - val_accuracy: 0.7943 Epoch 5/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.8625 - accuracy: 0.8297 - val_loss: 0.9063 - val_accuracy: 0.8126 Epoch 6/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.7837 - accuracy: 0.8507 - val_loss: 0.8360 - val_accuracy: 0.8313 Epoch 7/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.7326 - accuracy: 0.8661 - val_loss: 0.8445 - val_accuracy: 0.8295 Epoch 8/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.6841 - accuracy: 0.8813 - val_loss: 0.8593 - val_accuracy: 0.8275 Epoch 9/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.6557 - accuracy: 0.8903 - val_loss: 0.7994 - val_accuracy: 0.8474 Epoch 10/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.6266 - accuracy: 0.8989 - val_loss: 0.8240 - val_accuracy: 0.8371 Epoch 11/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.5965 - accuracy: 0.9077 - val_loss: 0.7806 - val_accuracy: 0.8551 Epoch 12/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.5742 - accuracy: 0.9148 - val_loss: 0.7651 - val_accuracy: 0.8537 Epoch 13/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.5408 - accuracy: 0.9225 - val_loss: 0.8055 - val_accuracy: 0.8450 Epoch 14/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.5269 - accuracy: 
0.9258 - val_loss: 0.7306 - val_accuracy: 0.8669 Epoch 15/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.5013 - accuracy: 0.9313 - val_loss: 0.7440 - val_accuracy: 0.8611 Epoch 16/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4817 - accuracy: 0.9351 - val_loss: 0.7720 - val_accuracy: 0.8465 Epoch 17/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4642 - accuracy: 0.9379 - val_loss: 0.7498 - val_accuracy: 0.8629 Epoch 18/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4586 - accuracy: 0.9419 - val_loss: 0.7245 - val_accuracy: 0.8632 Epoch 19/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.4476 - accuracy: 0.9435 - val_loss: 0.7730 - val_accuracy: 0.8523 Epoch 20/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4246 - accuracy: 0.9475 - val_loss: 0.7393 - val_accuracy: 0.8640 Epoch 21/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4160 - accuracy: 0.9488 - val_loss: 0.6897 - val_accuracy: 0.8783 Epoch 22/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.4106 - accuracy: 0.9495 - val_loss: 0.6843 - val_accuracy: 0.8770 Epoch 23/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3964 - accuracy: 0.9534 - val_loss: 0.7072 - val_accuracy: 0.8673 Epoch 24/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3881 - accuracy: 0.9537 - val_loss: 0.7687 - val_accuracy: 0.8521 Epoch 25/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3789 - accuracy: 0.9565 - val_loss: 0.7446 - val_accuracy: 0.8635 Epoch 26/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.3723 - accuracy: 0.9580 - val_loss: 0.7144 - val_accuracy: 0.8661 Epoch 27/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.3623 - accuracy: 0.9586 - val_loss: 0.7249 - val_accuracy: 0.8577 Epoch 28/50 1563/1563 
[==============================] - 32s 20ms/step - loss: 0.3448 - accuracy: 0.9624 - val_loss: 0.6616 - val_accuracy: 0.8775 Epoch 29/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3545 - accuracy: 0.9605 - val_loss: 0.6728 - val_accuracy: 0.8754 Epoch 30/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.3309 - accuracy: 0.9651 - val_loss: 0.6770 - val_accuracy: 0.8781 Epoch 31/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3361 - accuracy: 0.9642 - val_loss: 0.6891 - val_accuracy: 0.8757 Epoch 32/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.3309 - accuracy: 0.9648 - val_loss: 0.7076 - val_accuracy: 0.8682 Epoch 33/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.3267 - accuracy: 0.9659 - val_loss: 0.7928 - val_accuracy: 0.8418 Epoch 34/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.3134 - accuracy: 0.9673 - val_loss: 0.6733 - val_accuracy: 0.8728 Epoch 35/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.3134 - accuracy: 0.9682 - val_loss: 0.7214 - val_accuracy: 0.8635 Epoch 36/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.2990 - accuracy: 0.9703 - val_loss: 0.6758 - val_accuracy: 0.8721 Epoch 37/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.2963 - accuracy: 0.9709 - val_loss: 0.6697 - val_accuracy: 0.8762 Epoch 38/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.3045 - accuracy: 0.9691 - val_loss: 0.7641 - val_accuracy: 0.8525 Epoch 39/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2870 - accuracy: 0.9723 - val_loss: 0.6489 - val_accuracy: 0.8758 Epoch 40/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2872 - accuracy: 0.9708 - val_loss: 0.6610 - val_accuracy: 0.8799 Epoch 41/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2792 - accuracy: 0.9728 - 
val_loss: 0.6997 - val_accuracy: 0.8606 Epoch 42/50 1563/1563 [==============================] - 34s 22ms/step - loss: 0.2921 - accuracy: 0.9709 - val_loss: 0.6929 - val_accuracy: 0.8767 Epoch 43/50 1563/1563 [==============================] - 33s 21ms/step - loss: 0.2733 - accuracy: 0.9727 - val_loss: 0.6441 - val_accuracy: 0.8761 Epoch 44/50 1563/1563 [==============================] - 34s 22ms/step - loss: 0.2736 - accuracy: 0.9730 - val_loss: 0.7316 - val_accuracy: 0.8635 Epoch 45/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2682 - accuracy: 0.9738 - val_loss: 0.6933 - val_accuracy: 0.8695 Epoch 46/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2664 - accuracy: 0.9750 - val_loss: 0.6445 - val_accuracy: 0.8771 Epoch 47/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2546 - accuracy: 0.9761 - val_loss: 0.6830 - val_accuracy: 0.8713 Epoch 48/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2587 - accuracy: 0.9752 - val_loss: 0.6477 - val_accuracy: 0.8793 Epoch 49/50 1563/1563 [==============================] - 32s 21ms/step - loss: 0.2640 - accuracy: 0.9745 - val_loss: 0.6304 - val_accuracy: 0.8839 Epoch 50/50 1563/1563 [==============================] - 32s 20ms/step - loss: 0.2532 - accuracy: 0.9760 - val_loss: 0.8543 - val_accuracy: 0.8475
# Elapsed wall-clock time for the model10.fit() call above. NOTE: this
# measures TRAINING, not prediction — the original "predict" label was
# misleading. Variable name kept for compatibility with later cells.
time_to_predict10 = end_time - start_time
print('total elapsed time to train: ' + str(time_to_predict10))
total elapsed time to predict: 0:27:23.775982
# Final test-set loss/accuracy for model10 (accuracy reported as a percentage).
loss, accuracy = model10.evaluate(x_test, y_test_cat)
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 2s 7ms/step - loss: 0.8543 - accuracy: 0.8475 test set accuracy: 84.75000262260437
# Predictions
# Class-probability predictions for every test image: shape (10000, 10).
preds10 = model10.predict(x_test)
print('shape of preds: ', preds10.shape)
313/313 [==============================] - 2s 5ms/step shape of preds: (10000, 10)
# Plot Prediction Metrics
# Per-epoch training history recorded by model10.fit().
history10_dict = history10.history
history10_dict.keys()
dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
# Tabulate the last five epochs, rounded to 3 decimals.
history10_df=pd.DataFrame(history10_dict)
history10_df.tail().round(3)
| loss | accuracy | val_loss | val_accuracy | |
|---|---|---|---|---|
| 45 | 0.266 | 0.975 | 0.644 | 0.877 |
| 46 | 0.255 | 0.976 | 0.683 | 0.871 |
| 47 | 0.259 | 0.975 | 0.648 | 0.879 |
| 48 | 0.264 | 0.975 | 0.630 | 0.884 |
| 49 | 0.253 | 0.976 | 0.854 | 0.848 |
# Unpack the per-epoch metric series from the model10 history (presumably
# for individual plots in later cells — verify usage).
losses10 = history10.history['loss']
accs10 = history10.history['accuracy']
val_losses10 = history10.history['val_loss']
val_accs10 = history10.history['val_accuracy']
# Number of completed epochs.
epochs10 = len(losses10)
# Quick combined plot of all four curves.
pd.DataFrame(history10.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f86fb055f50>
# Confusion Matrix
# Convert probabilities to hard class labels for the report below.
pred10 = model10.predict(x_test)
pred10 =np.argmax(pred10, axis=1)
313/313 [==============================] - 2s 5ms/step
print_validation_report(y_test, pred10)
Classification Report
precision recall f1-score support
0 0.79 0.92 0.85 1000
1 0.97 0.92 0.94 1000
2 0.85 0.74 0.79 1000
3 0.86 0.61 0.71 1000
4 0.73 0.93 0.81 1000
5 0.92 0.67 0.78 1000
6 0.76 0.95 0.84 1000
7 0.91 0.90 0.90 1000
8 0.88 0.91 0.90 1000
9 0.90 0.94 0.92 1000
accuracy 0.85 10000
macro avg 0.86 0.85 0.84 10000
weighted avg 0.86 0.85 0.84 10000
Accuracy Score: 0.8475
Root Mean Square Error: 1.5606408939919523
# Confusion-matrix heatmap for model10's test-set predictions.
plot_confusion_matrix(y_test,pred10)
# model11: architecture identical to model10; this run adds on-the-fly
# data augmentation via ImageDataGenerator.
model11 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3),
           activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.20),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),
    Conv2D(filters=512, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),
    Flatten(),
    Dense(384, activation='relu', kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])
# Augmentation: small horizontal/vertical shifts, horizontal flips, and
# rotations of up to 20 degrees.
datagen = ImageDataGenerator(width_shift_range=0.1,
                             height_shift_range=0.1,
                             horizontal_flip=True,
                             rotation_range=20)
it_train = datagen.flow(x_train, y_train_cat)
# NOTE(review): `steps` assumes a batch size of 64, but datagen.flow()
# above uses its default batch_size=32, and the later fit() call never
# passes steps_per_epoch — confirm whether this variable is still needed.
steps = int(x_train.shape[0] / 64)
model11.summary()
Model: "sequential_6"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d_23 (Conv2D) (None, 32, 32, 128) 3584
batch_normalization_14 (Bat (None, 32, 32, 128) 512
chNormalization)
conv2d_24 (Conv2D) (None, 32, 32, 128) 147584
batch_normalization_15 (Bat (None, 32, 32, 128) 512
chNormalization)
max_pooling2d_17 (MaxPoolin (None, 16, 16, 128) 0
g2D)
dropout_23 (Dropout) (None, 16, 16, 128) 0
conv2d_25 (Conv2D) (None, 16, 16, 256) 295168
batch_normalization_16 (Bat (None, 16, 16, 256) 1024
chNormalization)
conv2d_26 (Conv2D) (None, 16, 16, 256) 590080
batch_normalization_17 (Bat (None, 16, 16, 256) 1024
chNormalization)
max_pooling2d_18 (MaxPoolin (None, 8, 8, 256) 0
g2D)
dropout_24 (Dropout) (None, 8, 8, 256) 0
conv2d_27 (Conv2D) (None, 8, 8, 512) 1180160
max_pooling2d_19 (MaxPoolin (None, 4, 4, 512) 0
g2D)
dropout_25 (Dropout) (None, 4, 4, 512) 0
flatten_6 (Flatten) (None, 8192) 0
dense_12 (Dense) (None, 384) 3146112
batch_normalization_18 (Bat (None, 384) 1536
chNormalization)
dropout_26 (Dropout) (None, 384) 0
dense_13 (Dense) (None, 10) 3850
=================================================================
Total params: 5,371,146
Trainable params: 5,368,842
Non-trainable params: 2,304
_________________________________________________________________
# Render the model11 architecture diagram (overwrites the earlier
# CIFAR10.png files written for model9/model10).
keras.utils.plot_model(model11, "CIFAR10.png", show_shapes=True)
model11.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
# Time the 50-epoch run on the augmented generator; validation each epoch
# still uses the un-augmented test set.
start_time = datetime.datetime.now()
history11 = model11.fit(it_train,epochs=50,validation_data=(x_test,y_test_cat))
end_time = datetime.datetime.now()
Epoch 1/50 1563/1563 [==============================] - 49s 31ms/step - loss: 1.8716 - accuracy: 0.4877 - val_loss: 1.3986 - val_accuracy: 0.6132 Epoch 2/50 1563/1563 [==============================] - 41s 26ms/step - loss: 1.3522 - accuracy: 0.6520 - val_loss: 1.4292 - val_accuracy: 0.6469 Epoch 3/50 1563/1563 [==============================] - 48s 31ms/step - loss: 1.2277 - accuracy: 0.7059 - val_loss: 1.1709 - val_accuracy: 0.7245 Epoch 4/50 1563/1563 [==============================] - 46s 30ms/step - loss: 1.1001 - accuracy: 0.7403 - val_loss: 1.0774 - val_accuracy: 0.7430 Epoch 5/50 1563/1563 [==============================] - 38s 25ms/step - loss: 1.0218 - accuracy: 0.7625 - val_loss: 1.0630 - val_accuracy: 0.7451 Epoch 6/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.9501 - accuracy: 0.7796 - val_loss: 1.0541 - val_accuracy: 0.7580 Epoch 7/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.9076 - accuracy: 0.7915 - val_loss: 0.9488 - val_accuracy: 0.7809 Epoch 8/50 1563/1563 [==============================] - 38s 25ms/step - loss: 0.8836 - accuracy: 0.8016 - val_loss: 0.9708 - val_accuracy: 0.7792 Epoch 9/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.8470 - accuracy: 0.8107 - val_loss: 0.8473 - val_accuracy: 0.8092 Epoch 10/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.8259 - accuracy: 0.8183 - val_loss: 0.8291 - val_accuracy: 0.8204 Epoch 11/50 1563/1563 [==============================] - 38s 24ms/step - loss: 0.8034 - accuracy: 0.8254 - val_loss: 0.8004 - val_accuracy: 0.8303 Epoch 12/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.7838 - accuracy: 0.8307 - val_loss: 0.7772 - val_accuracy: 0.8310 Epoch 13/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7677 - accuracy: 0.8345 - val_loss: 0.6756 - val_accuracy: 0.8630 Epoch 14/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7443 - accuracy: 
0.8424 - val_loss: 0.8414 - val_accuracy: 0.8185 Epoch 15/50 1563/1563 [==============================] - 38s 25ms/step - loss: 0.7352 - accuracy: 0.8431 - val_loss: 0.7757 - val_accuracy: 0.8358 Epoch 16/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7332 - accuracy: 0.8458 - val_loss: 0.7742 - val_accuracy: 0.8349 Epoch 17/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7094 - accuracy: 0.8520 - val_loss: 0.7052 - val_accuracy: 0.8554 Epoch 18/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.6965 - accuracy: 0.8556 - val_loss: 0.6921 - val_accuracy: 0.8549 Epoch 19/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.6867 - accuracy: 0.8587 - val_loss: 0.6849 - val_accuracy: 0.8596 Epoch 20/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.6818 - accuracy: 0.8606 - val_loss: 0.6597 - val_accuracy: 0.8706 Epoch 21/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.6619 - accuracy: 0.8655 - val_loss: 0.6365 - val_accuracy: 0.8756 Epoch 22/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.6583 - accuracy: 0.8657 - val_loss: 0.7031 - val_accuracy: 0.8537 Epoch 23/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.6460 - accuracy: 0.8689 - val_loss: 0.6423 - val_accuracy: 0.8707 Epoch 24/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.6381 - accuracy: 0.8704 - val_loss: 0.6437 - val_accuracy: 0.8689 Epoch 25/50 1563/1563 [==============================] - 41s 26ms/step - loss: 0.6273 - accuracy: 0.8744 - val_loss: 0.6629 - val_accuracy: 0.8634 Epoch 26/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.6206 - accuracy: 0.8755 - val_loss: 0.6055 - val_accuracy: 0.8804 Epoch 27/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.6094 - accuracy: 0.8769 - val_loss: 0.6514 - val_accuracy: 0.8718 Epoch 28/50 1563/1563 
[==============================] - 40s 26ms/step - loss: 0.6105 - accuracy: 0.8791 - val_loss: 0.6633 - val_accuracy: 0.8613 Epoch 29/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.5921 - accuracy: 0.8824 - val_loss: 0.5953 - val_accuracy: 0.8836 Epoch 30/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.5918 - accuracy: 0.8824 - val_loss: 0.6413 - val_accuracy: 0.8689 Epoch 31/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.5879 - accuracy: 0.8835 - val_loss: 0.6073 - val_accuracy: 0.8783 Epoch 32/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.5799 - accuracy: 0.8860 - val_loss: 0.6264 - val_accuracy: 0.8675 Epoch 33/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5707 - accuracy: 0.8866 - val_loss: 0.6241 - val_accuracy: 0.8720 Epoch 34/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5619 - accuracy: 0.8896 - val_loss: 0.6574 - val_accuracy: 0.8634 Epoch 35/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5621 - accuracy: 0.8902 - val_loss: 0.5833 - val_accuracy: 0.8851 Epoch 36/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5530 - accuracy: 0.8916 - val_loss: 0.6166 - val_accuracy: 0.8716 Epoch 37/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5463 - accuracy: 0.8946 - val_loss: 0.6245 - val_accuracy: 0.8715 Epoch 38/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.5397 - accuracy: 0.8942 - val_loss: 0.5625 - val_accuracy: 0.8902 Epoch 39/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5418 - accuracy: 0.8937 - val_loss: 0.5768 - val_accuracy: 0.8805 Epoch 40/50 1563/1563 [==============================] - 38s 25ms/step - loss: 0.5296 - accuracy: 0.8955 - val_loss: 0.6349 - val_accuracy: 0.8675 Epoch 41/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5336 - accuracy: 0.8977 - 
val_loss: 0.5796 - val_accuracy: 0.8865 Epoch 42/50 1563/1563 [==============================] - 38s 25ms/step - loss: 0.5244 - accuracy: 0.8983 - val_loss: 0.5832 - val_accuracy: 0.8832 Epoch 43/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5242 - accuracy: 0.8980 - val_loss: 0.5965 - val_accuracy: 0.8791 Epoch 44/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5147 - accuracy: 0.9010 - val_loss: 0.5825 - val_accuracy: 0.8790 Epoch 45/50 1563/1563 [==============================] - 40s 26ms/step - loss: 0.5159 - accuracy: 0.9013 - val_loss: 0.5528 - val_accuracy: 0.8888 Epoch 46/50 1563/1563 [==============================] - 40s 25ms/step - loss: 0.5171 - accuracy: 0.9005 - val_loss: 0.5971 - val_accuracy: 0.8799 Epoch 47/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5044 - accuracy: 0.9046 - val_loss: 0.5348 - val_accuracy: 0.8949 Epoch 48/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4981 - accuracy: 0.9049 - val_loss: 0.5627 - val_accuracy: 0.8913 Epoch 49/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4966 - accuracy: 0.9046 - val_loss: 0.5449 - val_accuracy: 0.8958 Epoch 50/50 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4902 - accuracy: 0.9078 - val_loss: 0.5250 - val_accuracy: 0.8985
# Wall-clock duration of the model-11 run (delta of the two datetime stamps).
# NOTE(review): the label says "predict" but the timed interval appears to
# cover the training run above — confirm against the earlier cells.
time_to_predict11 = end_time - start_time
elapsed_msg = 'total elapsed time to predict: ' + str(time_to_predict11)
print(elapsed_msg)
total elapsed time to predict: 0:33:23.807117
# Score model11 on the held-out test set and report accuracy as a percentage.
eval_metrics = model11.evaluate(x_test, y_test_cat)
loss, accuracy = eval_metrics
print('test set accuracy: ', accuracy * 100)
313/313 [==============================] - 2s 7ms/step - loss: 0.5250 - accuracy: 0.8985 test set accuracy: 89.85000252723694
pd.DataFrame(history11.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7f8585481550>
# Confusion Matrix: collapse the softmax class probabilities from model11
# into hard label predictions (index of the max probability per row).
pred11 = np.argmax(model11.predict(x_test), axis=1)
313/313 [==============================] - 2s 5ms/step
print_validation_report(y_test, pred11)
Classification Report
precision recall f1-score support
0 0.90 0.92 0.91 1000
1 0.93 0.98 0.95 1000
2 0.89 0.87 0.88 1000
3 0.82 0.80 0.81 1000
4 0.91 0.87 0.89 1000
5 0.93 0.75 0.83 1000
6 0.90 0.95 0.92 1000
7 0.84 0.96 0.90 1000
8 0.96 0.93 0.94 1000
9 0.93 0.94 0.94 1000
accuracy 0.90 10000
macro avg 0.90 0.90 0.90 10000
weighted avg 0.90 0.90 0.90 10000
Accuracy Score: 0.8985
Root Mean Square Error: 1.3096182649917494
plot_confusion_matrix(y_test,pred11)
# Model 12: VGG-style CNN for 32x32x3 images (CIFAR-10).
# Two double-conv blocks (128 then 256 filters, each Conv+BN twice, pool,
# dropout), one single 512-filter conv block, then an L2-regularized dense head.
model12 = Sequential([
    Conv2D(filters=128, kernel_size=(3, 3), input_shape=(32, 32, 3),
           activation='relu', kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=128, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.20),

    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    Conv2D(filters=256, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    BatchNormalization(),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.25),

    Conv2D(filters=512, kernel_size=(3, 3), activation='relu',
           kernel_initializer='he_uniform', padding='same'),
    MaxPool2D(pool_size=(2, 2)),
    Dropout(0.3),

    Flatten(),
    Dense(384, activation='relu',
          kernel_regularizer=tf.keras.regularizers.L2(0.001)),
    BatchNormalization(),
    Dropout(0.4),
    Dense(10, activation='softmax'),
])

# Light augmentation: small shifts, horizontal flips, mild rotation.
datagen = ImageDataGenerator(width_shift_range=0.1, height_shift_range=0.1,
                             horizontal_flip=True, rotation_range=20)
it_train = datagen.flow(x_train, y_train_cat)
# NOTE(review): `steps` is sized for batch 64, but datagen.flow above uses the
# default batch size (32) and `steps` is never passed to fit() below — the
# generator therefore yields 1563 steps/epoch, not 781. Confirm which was intended.
steps = int(x_train.shape[0] / 64)
model12.summary()
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
conv2d (Conv2D) (None, 32, 32, 128) 3584
batch_normalization (BatchN (None, 32, 32, 128) 512
ormalization)
conv2d_1 (Conv2D) (None, 32, 32, 128) 147584
batch_normalization_1 (Batc (None, 32, 32, 128) 512
hNormalization)
max_pooling2d (MaxPooling2D (None, 16, 16, 128) 0
)
dropout (Dropout) (None, 16, 16, 128) 0
conv2d_2 (Conv2D) (None, 16, 16, 256) 295168
batch_normalization_2 (Batc (None, 16, 16, 256) 1024
hNormalization)
conv2d_3 (Conv2D) (None, 16, 16, 256) 590080
batch_normalization_3 (Batc (None, 16, 16, 256) 1024
hNormalization)
max_pooling2d_1 (MaxPooling (None, 8, 8, 256) 0
2D)
dropout_1 (Dropout) (None, 8, 8, 256) 0
conv2d_4 (Conv2D) (None, 8, 8, 512) 1180160
max_pooling2d_2 (MaxPooling (None, 4, 4, 512) 0
2D)
dropout_2 (Dropout) (None, 4, 4, 512) 0
flatten (Flatten) (None, 8192) 0
dense (Dense) (None, 384) 3146112
batch_normalization_4 (Batc (None, 384) 1536
hNormalization)
dropout_3 (Dropout) (None, 384) 0
dense_1 (Dense) (None, 10) 3850
=================================================================
Total params: 5,371,146
Trainable params: 5,368,842
Non-trainable params: 2,304
_________________________________________________________________
# Compile and train model12 on the augmented generator for 200 epochs,
# validating on the raw (un-augmented) test set each epoch, and record the
# wall-clock time the whole fit took.
model12.compile(
    optimizer='adam',
    loss='categorical_crossentropy',
    metrics=['accuracy'],
)
start_time = datetime.datetime.now()
history12 = model12.fit(
    it_train,
    epochs=200,
    validation_data=(x_test, y_test_cat),
)
end_time = datetime.datetime.now()
Epoch 1/200 1563/1563 [==============================] - 64s 33ms/step - loss: 1.8307 - accuracy: 0.5055 - val_loss: 1.3491 - val_accuracy: 0.6225 Epoch 2/200 1563/1563 [==============================] - 45s 29ms/step - loss: 1.3450 - accuracy: 0.6569 - val_loss: 4.5231 - val_accuracy: 0.3673 Epoch 3/200 1563/1563 [==============================] - 47s 30ms/step - loss: 1.2237 - accuracy: 0.7057 - val_loss: 1.2048 - val_accuracy: 0.7148 Epoch 4/200 1563/1563 [==============================] - 39s 25ms/step - loss: 1.1053 - accuracy: 0.7390 - val_loss: 1.0265 - val_accuracy: 0.7666 Epoch 5/200 1563/1563 [==============================] - 39s 25ms/step - loss: 1.0237 - accuracy: 0.7622 - val_loss: 0.9502 - val_accuracy: 0.7845 Epoch 6/200 1563/1563 [==============================] - 40s 26ms/step - loss: 0.9519 - accuracy: 0.7829 - val_loss: 0.9750 - val_accuracy: 0.7783 Epoch 7/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.9103 - accuracy: 0.7956 - val_loss: 0.8936 - val_accuracy: 0.8097 Epoch 8/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.8815 - accuracy: 0.8041 - val_loss: 1.0294 - val_accuracy: 0.7634 Epoch 9/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.8473 - accuracy: 0.8133 - val_loss: 0.8480 - val_accuracy: 0.8120 Epoch 10/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.8294 - accuracy: 0.8199 - val_loss: 1.2320 - val_accuracy: 0.7081 Epoch 11/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.8090 - accuracy: 0.8231 - val_loss: 0.8638 - val_accuracy: 0.8119 Epoch 12/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.7894 - accuracy: 0.8330 - val_loss: 0.7774 - val_accuracy: 0.8351 Epoch 13/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7699 - accuracy: 0.8376 - val_loss: 0.7763 - val_accuracy: 0.8379 Epoch 14/200 1563/1563 [==============================] - 38s 24ms/step - loss: 
0.7505 - accuracy: 0.8440 - val_loss: 0.8008 - val_accuracy: 0.8230 Epoch 15/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7446 - accuracy: 0.8453 - val_loss: 0.8884 - val_accuracy: 0.8063 Epoch 16/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.7224 - accuracy: 0.8495 - val_loss: 0.7656 - val_accuracy: 0.8355 Epoch 17/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.7157 - accuracy: 0.8519 - val_loss: 0.7249 - val_accuracy: 0.8458 Epoch 18/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6981 - accuracy: 0.8574 - val_loss: 0.7673 - val_accuracy: 0.8388 Epoch 19/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.6980 - accuracy: 0.8573 - val_loss: 0.7301 - val_accuracy: 0.8453 Epoch 20/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6778 - accuracy: 0.8627 - val_loss: 0.6542 - val_accuracy: 0.8701 Epoch 21/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6697 - accuracy: 0.8640 - val_loss: 0.8038 - val_accuracy: 0.8278 Epoch 22/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6616 - accuracy: 0.8654 - val_loss: 0.6684 - val_accuracy: 0.8664 Epoch 23/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.6495 - accuracy: 0.8702 - val_loss: 0.7097 - val_accuracy: 0.8548 Epoch 24/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.6377 - accuracy: 0.8710 - val_loss: 0.5995 - val_accuracy: 0.8866 Epoch 25/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6360 - accuracy: 0.8718 - val_loss: 0.7255 - val_accuracy: 0.8449 Epoch 26/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6257 - accuracy: 0.8743 - val_loss: 0.6079 - val_accuracy: 0.8811 Epoch 27/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6200 - accuracy: 0.8774 - val_loss: 0.6394 - val_accuracy: 0.8740 Epoch 
28/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6146 - accuracy: 0.8780 - val_loss: 0.6095 - val_accuracy: 0.8806 Epoch 29/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.6126 - accuracy: 0.8793 - val_loss: 0.6487 - val_accuracy: 0.8683 Epoch 30/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.5942 - accuracy: 0.8830 - val_loss: 0.6639 - val_accuracy: 0.8629 Epoch 31/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5868 - accuracy: 0.8858 - val_loss: 0.6155 - val_accuracy: 0.8724 Epoch 32/200 1563/1563 [==============================] - 37s 24ms/step - loss: 0.5754 - accuracy: 0.8871 - val_loss: 0.5880 - val_accuracy: 0.8833 Epoch 33/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5710 - accuracy: 0.8870 - val_loss: 0.6645 - val_accuracy: 0.8606 Epoch 34/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5673 - accuracy: 0.8881 - val_loss: 0.5599 - val_accuracy: 0.8924 Epoch 35/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5634 - accuracy: 0.8920 - val_loss: 0.6835 - val_accuracy: 0.8537 Epoch 36/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5561 - accuracy: 0.8913 - val_loss: 0.5960 - val_accuracy: 0.8777 Epoch 37/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5538 - accuracy: 0.8915 - val_loss: 0.6200 - val_accuracy: 0.8749 Epoch 38/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.5456 - accuracy: 0.8949 - val_loss: 0.6463 - val_accuracy: 0.8679 Epoch 39/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5494 - accuracy: 0.8939 - val_loss: 0.5705 - val_accuracy: 0.8884 Epoch 40/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5449 - accuracy: 0.8963 - val_loss: 0.6848 - val_accuracy: 0.8576 Epoch 41/200 1563/1563 [==============================] - 39s 25ms/step - loss: 
0.5357 - accuracy: 0.8984 - val_loss: 0.6237 - val_accuracy: 0.8730 Epoch 42/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5342 - accuracy: 0.8980 - val_loss: 0.6355 - val_accuracy: 0.8694 Epoch 43/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5287 - accuracy: 0.9003 - val_loss: 0.6257 - val_accuracy: 0.8703 Epoch 44/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5274 - accuracy: 0.8989 - val_loss: 0.5900 - val_accuracy: 0.8816 Epoch 45/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5171 - accuracy: 0.9024 - val_loss: 0.5965 - val_accuracy: 0.8807 Epoch 46/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5196 - accuracy: 0.9023 - val_loss: 0.5757 - val_accuracy: 0.8850 Epoch 47/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.5182 - accuracy: 0.9025 - val_loss: 0.5376 - val_accuracy: 0.8939 Epoch 48/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.5084 - accuracy: 0.9051 - val_loss: 0.6218 - val_accuracy: 0.8763 Epoch 49/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.5059 - accuracy: 0.9044 - val_loss: 0.5487 - val_accuracy: 0.8934 Epoch 50/200 1563/1563 [==============================] - 37s 24ms/step - loss: 0.4987 - accuracy: 0.9067 - val_loss: 0.6165 - val_accuracy: 0.8722 Epoch 51/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4983 - accuracy: 0.9051 - val_loss: 0.5203 - val_accuracy: 0.9021 Epoch 52/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4958 - accuracy: 0.9085 - val_loss: 0.5921 - val_accuracy: 0.8827 Epoch 53/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4842 - accuracy: 0.9109 - val_loss: 0.5897 - val_accuracy: 0.8787 Epoch 54/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4781 - accuracy: 0.9121 - val_loss: 0.6761 - val_accuracy: 0.8535 Epoch 
55/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4883 - accuracy: 0.9116 - val_loss: 0.5579 - val_accuracy: 0.8916 Epoch 56/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4812 - accuracy: 0.9098 - val_loss: 0.5780 - val_accuracy: 0.8843 Epoch 57/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4696 - accuracy: 0.9140 - val_loss: 0.6168 - val_accuracy: 0.8756 Epoch 58/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4754 - accuracy: 0.9125 - val_loss: 0.5580 - val_accuracy: 0.8876 Epoch 59/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4722 - accuracy: 0.9132 - val_loss: 0.6570 - val_accuracy: 0.8617 Epoch 60/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4713 - accuracy: 0.9136 - val_loss: 0.5480 - val_accuracy: 0.8889 Epoch 61/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4596 - accuracy: 0.9165 - val_loss: 0.5885 - val_accuracy: 0.8830 Epoch 62/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4595 - accuracy: 0.9152 - val_loss: 0.5618 - val_accuracy: 0.8885 Epoch 63/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4609 - accuracy: 0.9162 - val_loss: 0.5496 - val_accuracy: 0.8939 Epoch 64/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4597 - accuracy: 0.9162 - val_loss: 0.5373 - val_accuracy: 0.8981 Epoch 65/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4548 - accuracy: 0.9185 - val_loss: 0.5653 - val_accuracy: 0.8864 Epoch 66/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4522 - accuracy: 0.9187 - val_loss: 0.5558 - val_accuracy: 0.8913 Epoch 67/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4491 - accuracy: 0.9192 - val_loss: 0.5314 - val_accuracy: 0.8958 Epoch 68/200 1563/1563 [==============================] - 38s 24ms/step - loss: 
0.4549 - accuracy: 0.9185 - val_loss: 0.5653 - val_accuracy: 0.8857 Epoch 69/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4396 - accuracy: 0.9204 - val_loss: 0.5677 - val_accuracy: 0.8912 Epoch 70/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4447 - accuracy: 0.9209 - val_loss: 0.5776 - val_accuracy: 0.8872 Epoch 71/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4408 - accuracy: 0.9210 - val_loss: 0.5620 - val_accuracy: 0.8891 Epoch 72/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4383 - accuracy: 0.9213 - val_loss: 0.5450 - val_accuracy: 0.8925 Epoch 73/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4353 - accuracy: 0.9226 - val_loss: 0.5054 - val_accuracy: 0.9013 Epoch 74/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4406 - accuracy: 0.9211 - val_loss: 0.5451 - val_accuracy: 0.8931 Epoch 75/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4370 - accuracy: 0.9234 - val_loss: 0.5997 - val_accuracy: 0.8779 Epoch 76/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4308 - accuracy: 0.9251 - val_loss: 0.5400 - val_accuracy: 0.8968 Epoch 77/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4367 - accuracy: 0.9241 - val_loss: 0.5386 - val_accuracy: 0.8960 Epoch 78/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4272 - accuracy: 0.9244 - val_loss: 0.5659 - val_accuracy: 0.8887 Epoch 79/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4305 - accuracy: 0.9229 - val_loss: 0.6217 - val_accuracy: 0.8681 Epoch 80/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4226 - accuracy: 0.9265 - val_loss: 0.5175 - val_accuracy: 0.8997 Epoch 81/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4234 - accuracy: 0.9274 - val_loss: 0.5428 - val_accuracy: 0.8933 Epoch 
82/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4184 - accuracy: 0.9253 - val_loss: 0.5323 - val_accuracy: 0.8913 Epoch 83/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.4186 - accuracy: 0.9268 - val_loss: 0.5225 - val_accuracy: 0.9016 Epoch 84/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4149 - accuracy: 0.9276 - val_loss: 0.5477 - val_accuracy: 0.8903 Epoch 85/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4192 - accuracy: 0.9267 - val_loss: 0.5402 - val_accuracy: 0.8973 Epoch 86/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4180 - accuracy: 0.9273 - val_loss: 0.5253 - val_accuracy: 0.9023 Epoch 87/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4115 - accuracy: 0.9291 - val_loss: 0.5129 - val_accuracy: 0.9010 Epoch 88/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4060 - accuracy: 0.9295 - val_loss: 0.6197 - val_accuracy: 0.8758 Epoch 89/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4078 - accuracy: 0.9303 - val_loss: 0.5251 - val_accuracy: 0.8992 Epoch 90/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4096 - accuracy: 0.9296 - val_loss: 0.5405 - val_accuracy: 0.8938 Epoch 91/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.4038 - accuracy: 0.9315 - val_loss: 0.5331 - val_accuracy: 0.8962 Epoch 92/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4005 - accuracy: 0.9319 - val_loss: 0.5214 - val_accuracy: 0.9008 Epoch 93/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.4055 - accuracy: 0.9313 - val_loss: 0.5222 - val_accuracy: 0.9018 Epoch 94/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3964 - accuracy: 0.9327 - val_loss: 0.5090 - val_accuracy: 0.9014 Epoch 95/200 1563/1563 [==============================] - 38s 25ms/step - loss: 
0.3935 - accuracy: 0.9357 - val_loss: 0.5057 - val_accuracy: 0.9047 Epoch 96/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3946 - accuracy: 0.9337 - val_loss: 0.5248 - val_accuracy: 0.8974 Epoch 97/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3960 - accuracy: 0.9337 - val_loss: 0.5287 - val_accuracy: 0.8989 Epoch 98/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3936 - accuracy: 0.9345 - val_loss: 0.5251 - val_accuracy: 0.8992 Epoch 99/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3887 - accuracy: 0.9356 - val_loss: 0.4980 - val_accuracy: 0.9037 Epoch 100/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3956 - accuracy: 0.9341 - val_loss: 0.5745 - val_accuracy: 0.8843 Epoch 101/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3905 - accuracy: 0.9355 - val_loss: 0.5508 - val_accuracy: 0.8892 Epoch 102/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3897 - accuracy: 0.9348 - val_loss: 0.5125 - val_accuracy: 0.8989 Epoch 103/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3863 - accuracy: 0.9357 - val_loss: 0.5238 - val_accuracy: 0.8985 Epoch 104/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3882 - accuracy: 0.9361 - val_loss: 0.5170 - val_accuracy: 0.9003 Epoch 105/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3854 - accuracy: 0.9367 - val_loss: 0.5406 - val_accuracy: 0.8971 Epoch 106/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3881 - accuracy: 0.9362 - val_loss: 0.5096 - val_accuracy: 0.9017 Epoch 107/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3784 - accuracy: 0.9387 - val_loss: 0.5319 - val_accuracy: 0.8982 Epoch 108/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3866 - accuracy: 0.9359 - val_loss: 0.5031 - val_accuracy: 
0.9070 Epoch 109/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3837 - accuracy: 0.9364 - val_loss: 0.5304 - val_accuracy: 0.8944 Epoch 110/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3754 - accuracy: 0.9367 - val_loss: 0.5487 - val_accuracy: 0.8867 Epoch 111/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3770 - accuracy: 0.9385 - val_loss: 0.5048 - val_accuracy: 0.9024 Epoch 112/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3784 - accuracy: 0.9365 - val_loss: 0.5241 - val_accuracy: 0.8977 Epoch 113/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3758 - accuracy: 0.9384 - val_loss: 0.4872 - val_accuracy: 0.9061 Epoch 114/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3672 - accuracy: 0.9405 - val_loss: 0.5366 - val_accuracy: 0.8959 Epoch 115/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3696 - accuracy: 0.9394 - val_loss: 0.5138 - val_accuracy: 0.8975 Epoch 116/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3633 - accuracy: 0.9420 - val_loss: 0.5012 - val_accuracy: 0.9052 Epoch 117/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3688 - accuracy: 0.9404 - val_loss: 0.5068 - val_accuracy: 0.9042 Epoch 118/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3648 - accuracy: 0.9409 - val_loss: 0.5173 - val_accuracy: 0.9007 Epoch 119/200 1563/1563 [==============================] - 40s 25ms/step - loss: 0.3714 - accuracy: 0.9399 - val_loss: 0.4849 - val_accuracy: 0.9067 Epoch 120/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3598 - accuracy: 0.9425 - val_loss: 0.4756 - val_accuracy: 0.9079 Epoch 121/200 1563/1563 [==============================] - 40s 25ms/step - loss: 0.3662 - accuracy: 0.9406 - val_loss: 0.5101 - val_accuracy: 0.9017 Epoch 122/200 1563/1563 
[==============================] - 38s 25ms/step - loss: 0.3651 - accuracy: 0.9407 - val_loss: 0.5096 - val_accuracy: 0.9028 Epoch 123/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3563 - accuracy: 0.9426 - val_loss: 0.5043 - val_accuracy: 0.9013 Epoch 124/200 1563/1563 [==============================] - 40s 25ms/step - loss: 0.3630 - accuracy: 0.9408 - val_loss: 0.5409 - val_accuracy: 0.8950 Epoch 125/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3632 - accuracy: 0.9410 - val_loss: 0.5676 - val_accuracy: 0.8862 Epoch 126/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3597 - accuracy: 0.9423 - val_loss: 0.4801 - val_accuracy: 0.9090 Epoch 127/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3567 - accuracy: 0.9418 - val_loss: 0.5482 - val_accuracy: 0.8890 Epoch 128/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3583 - accuracy: 0.9424 - val_loss: 0.5072 - val_accuracy: 0.8985 Epoch 129/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3557 - accuracy: 0.9424 - val_loss: 0.5345 - val_accuracy: 0.8937 Epoch 130/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3524 - accuracy: 0.9433 - val_loss: 0.5194 - val_accuracy: 0.8985 Epoch 131/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3540 - accuracy: 0.9438 - val_loss: 0.5237 - val_accuracy: 0.9015 Epoch 132/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3558 - accuracy: 0.9432 - val_loss: 0.5031 - val_accuracy: 0.9031 Epoch 133/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3514 - accuracy: 0.9445 - val_loss: 0.5161 - val_accuracy: 0.8985 Epoch 134/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3495 - accuracy: 0.9442 - val_loss: 0.4947 - val_accuracy: 0.9100 Epoch 135/200 1563/1563 [==============================] - 39s 25ms/step - loss: 
0.3553 - accuracy: 0.9434 - val_loss: 0.5737 - val_accuracy: 0.8825 Epoch 136/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3474 - accuracy: 0.9459 - val_loss: 0.4968 - val_accuracy: 0.9044 Epoch 137/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3482 - accuracy: 0.9450 - val_loss: 0.5133 - val_accuracy: 0.9016 Epoch 138/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3453 - accuracy: 0.9458 - val_loss: 0.5264 - val_accuracy: 0.8981 Epoch 139/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3455 - accuracy: 0.9446 - val_loss: 0.5065 - val_accuracy: 0.9046 Epoch 140/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3459 - accuracy: 0.9456 - val_loss: 0.4786 - val_accuracy: 0.9092 Epoch 141/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3470 - accuracy: 0.9450 - val_loss: 0.5021 - val_accuracy: 0.9028 Epoch 142/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3460 - accuracy: 0.9454 - val_loss: 0.4790 - val_accuracy: 0.9094 Epoch 143/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3485 - accuracy: 0.9442 - val_loss: 0.5231 - val_accuracy: 0.8973 Epoch 144/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3437 - accuracy: 0.9454 - val_loss: 0.5599 - val_accuracy: 0.8905 Epoch 145/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3447 - accuracy: 0.9458 - val_loss: 0.5736 - val_accuracy: 0.8929 Epoch 146/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3410 - accuracy: 0.9464 - val_loss: 0.5061 - val_accuracy: 0.9018 Epoch 147/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3395 - accuracy: 0.9475 - val_loss: 0.4937 - val_accuracy: 0.9040 Epoch 148/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3306 - accuracy: 0.9493 - val_loss: 0.5002 - val_accuracy: 
0.9053 Epoch 149/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3483 - accuracy: 0.9454 - val_loss: 0.5615 - val_accuracy: 0.8898 Epoch 150/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3313 - accuracy: 0.9486 - val_loss: 0.4820 - val_accuracy: 0.9105 Epoch 151/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3422 - accuracy: 0.9468 - val_loss: 0.5508 - val_accuracy: 0.8936 Epoch 152/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3380 - accuracy: 0.9469 - val_loss: 0.5201 - val_accuracy: 0.9004 Epoch 153/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3301 - accuracy: 0.9493 - val_loss: 0.5177 - val_accuracy: 0.8996 Epoch 154/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3371 - accuracy: 0.9489 - val_loss: 0.4748 - val_accuracy: 0.9097 Epoch 155/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3401 - accuracy: 0.9473 - val_loss: 0.5437 - val_accuracy: 0.8940 Epoch 156/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3302 - accuracy: 0.9482 - val_loss: 0.5226 - val_accuracy: 0.9001 Epoch 157/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3292 - accuracy: 0.9498 - val_loss: 0.5211 - val_accuracy: 0.8955 Epoch 158/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3287 - accuracy: 0.9500 - val_loss: 0.5025 - val_accuracy: 0.9006 Epoch 159/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3338 - accuracy: 0.9477 - val_loss: 0.4709 - val_accuracy: 0.9085 Epoch 160/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3346 - accuracy: 0.9490 - val_loss: 0.5111 - val_accuracy: 0.9034 Epoch 161/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3277 - accuracy: 0.9495 - val_loss: 0.4969 - val_accuracy: 0.9056 Epoch 162/200 1563/1563 
[==============================] - 38s 24ms/step - loss: 0.3263 - accuracy: 0.9493 - val_loss: 0.5174 - val_accuracy: 0.9027 Epoch 163/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3264 - accuracy: 0.9517 - val_loss: 0.5082 - val_accuracy: 0.9026 Epoch 164/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3299 - accuracy: 0.9498 - val_loss: 0.4737 - val_accuracy: 0.9096 Epoch 165/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3233 - accuracy: 0.9503 - val_loss: 0.5388 - val_accuracy: 0.8967 Epoch 166/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3241 - accuracy: 0.9512 - val_loss: 0.5170 - val_accuracy: 0.8970 Epoch 167/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3250 - accuracy: 0.9497 - val_loss: 0.5360 - val_accuracy: 0.8901 Epoch 168/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3219 - accuracy: 0.9511 - val_loss: 0.4656 - val_accuracy: 0.9132 Epoch 169/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3318 - accuracy: 0.9494 - val_loss: 0.5373 - val_accuracy: 0.8929 Epoch 170/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3240 - accuracy: 0.9503 - val_loss: 0.5737 - val_accuracy: 0.8852 Epoch 171/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3197 - accuracy: 0.9520 - val_loss: 0.5353 - val_accuracy: 0.8988 Epoch 172/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3256 - accuracy: 0.9516 - val_loss: 0.5085 - val_accuracy: 0.9014 Epoch 173/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3254 - accuracy: 0.9509 - val_loss: 0.5109 - val_accuracy: 0.8997 Epoch 174/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3181 - accuracy: 0.9514 - val_loss: 0.5875 - val_accuracy: 0.8841 Epoch 175/200 1563/1563 [==============================] - 39s 25ms/step - loss: 
0.3240 - accuracy: 0.9501 - val_loss: 0.4970 - val_accuracy: 0.9029 Epoch 176/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3218 - accuracy: 0.9506 - val_loss: 0.4565 - val_accuracy: 0.9135 Epoch 177/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3147 - accuracy: 0.9534 - val_loss: 0.5049 - val_accuracy: 0.8999 Epoch 178/200 1563/1563 [==============================] - 40s 25ms/step - loss: 0.3152 - accuracy: 0.9540 - val_loss: 0.5076 - val_accuracy: 0.9033 Epoch 179/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3170 - accuracy: 0.9533 - val_loss: 0.5097 - val_accuracy: 0.8992 Epoch 180/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3171 - accuracy: 0.9519 - val_loss: 0.5224 - val_accuracy: 0.8999 Epoch 181/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3167 - accuracy: 0.9517 - val_loss: 0.5130 - val_accuracy: 0.8996 Epoch 182/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3165 - accuracy: 0.9530 - val_loss: 0.5495 - val_accuracy: 0.8911 Epoch 183/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3171 - accuracy: 0.9516 - val_loss: 0.5049 - val_accuracy: 0.9017 Epoch 184/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3101 - accuracy: 0.9536 - val_loss: 0.4949 - val_accuracy: 0.9033 Epoch 185/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3159 - accuracy: 0.9531 - val_loss: 0.5258 - val_accuracy: 0.9002 Epoch 186/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3106 - accuracy: 0.9544 - val_loss: 0.4918 - val_accuracy: 0.9060 Epoch 187/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3081 - accuracy: 0.9538 - val_loss: 0.5107 - val_accuracy: 0.9030 Epoch 188/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3083 - accuracy: 0.9549 - val_loss: 0.5393 - val_accuracy: 
0.8989 Epoch 189/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3163 - accuracy: 0.9531 - val_loss: 0.5202 - val_accuracy: 0.8969 Epoch 190/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3099 - accuracy: 0.9537 - val_loss: 0.5469 - val_accuracy: 0.8924 Epoch 191/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3053 - accuracy: 0.9554 - val_loss: 0.5249 - val_accuracy: 0.9032 Epoch 192/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3084 - accuracy: 0.9553 - val_loss: 0.4982 - val_accuracy: 0.9056 Epoch 193/200 1563/1563 [==============================] - 38s 24ms/step - loss: 0.3114 - accuracy: 0.9551 - val_loss: 0.5312 - val_accuracy: 0.8982 Epoch 194/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3138 - accuracy: 0.9533 - val_loss: 0.4711 - val_accuracy: 0.9102 Epoch 195/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3065 - accuracy: 0.9548 - val_loss: 0.5370 - val_accuracy: 0.8983 Epoch 196/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3119 - accuracy: 0.9543 - val_loss: 0.4944 - val_accuracy: 0.9047 Epoch 197/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3019 - accuracy: 0.9559 - val_loss: 0.4995 - val_accuracy: 0.9048 Epoch 198/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3028 - accuracy: 0.9567 - val_loss: 0.5562 - val_accuracy: 0.8976 Epoch 199/200 1563/1563 [==============================] - 39s 25ms/step - loss: 0.3016 - accuracy: 0.9565 - val_loss: 0.5030 - val_accuracy: 0.9013 Epoch 200/200 1563/1563 [==============================] - 38s 25ms/step - loss: 0.3048 - accuracy: 0.9541 - val_loss: 0.5102 - val_accuracy: 0.9043
# Elapsed wall-clock time between the externally-set start_time/end_time markers.
# NOTE(review): ~2:09 matches the 200-epoch fit above, so this presumably timed
# training rather than prediction despite the name — confirm against the timer cell.
time_to_predict12 = end_time - start_time
print(f'total elapsed time to predict: {time_to_predict12}')
total elapsed time to predict: 2:09:09.542120
# Score model12 on the held-out test set (labels are one-hot encoded).
test_loss, test_accuracy = model12.evaluate(x_test, y_test_cat)
# Report accuracy as a percentage.
print('test set accuracy: ', test_accuracy * 100)
313/313 [==============================] - 2s 6ms/step - loss: 0.5102 - accuracy: 0.9043 test set accuracy: 90.42999744415283
pd.DataFrame(history12.history).plot()
<matplotlib.axes._subplots.AxesSubplot at 0x7fec2e107fd0>
# Predict class probabilities on the test set, then collapse each row
# to the index of its highest-probability class.
probabilities12 = model12.predict(x_test)
pred12 = probabilities12.argmax(axis=1)
313/313 [==============================] - 2s 5ms/step
print_validation_report(y_test, pred12)
Classification Report
precision recall f1-score support
0 0.94 0.89 0.92 1000
1 0.95 0.95 0.95 1000
2 0.88 0.87 0.88 1000
3 0.85 0.80 0.83 1000
4 0.93 0.86 0.89 1000
5 0.91 0.82 0.86 1000
6 0.83 0.98 0.90 1000
7 0.91 0.95 0.93 1000
8 0.95 0.94 0.95 1000
9 0.90 0.96 0.93 1000
accuracy 0.90 10000
macro avg 0.91 0.90 0.90 10000
weighted avg 0.91 0.90 0.90 10000
Accuracy Score: 0.9043
Root Mean Square Error: 1.2723993083933989
plot_confusion_matrix(y_test,pred12)